# blob: 57dcbc700063421ae784bdcc07897b867c8e49f1 [file] [log] [blame]
use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

# Allow skipping these integration tests (they need the real test
# corpus on disk), e.g. on CI systems without the corpus checkout.
if ($ENV{SKIP_REAL}) {
  plan skip_all => 'Skip real tests';
};

use Benchmark qw/:hireswallclock/;

my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

# Log::Log4perl->init() is called near the end of this script; load the
# module explicitly instead of relying on it being pulled in indirectly
# by the KorAP modules.
use Log::Log4perl;
use_ok('KorAP::XML::Krill');

# WDD/G27/38989 (Wikipedia discussion page)
my $path = catdir(dirname(__FILE__), 'corpus','WDD','G27','38989');

ok(my $doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

# Sigles derived from the corpus directory structure
is($doc->text_sigle, 'WDD11/G27/38989', 'Correct text sigle');
is($doc->doc_sigle, 'WDD11/G27', 'Correct document sigle');
is($doc->corpus_sigle, 'WDD11', 'Correct corpus sigle');

# Text-level metadata (T_* = text, S_* = string, A_* = attachment,
# D_* = date fields as used by the meta hash)
my $meta = $doc->meta;
is($meta->{T_title}, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$meta->{T_sub_title}, 'No SubTitle');
is($meta->{T_author}, '€pa, u.a.', 'Author');
is($meta->{A_editor}, 'wikipedia.org', 'Editor');

is($meta->{S_pub_place}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($meta->{A_publisher}, 'Wikipedia', 'Publisher');
is($meta->{S_text_type}, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$meta->{S_text_type_art}, 'Correct Text Type Art');
ok(!$meta->{S_text_type_ref}, 'Correct Text Type Ref');
ok(!$meta->{S_text_domain}, 'Correct Text Domain');
is($meta->{D_creation_date}, '20070707', 'Creation date');
is($meta->{S_availability}, 'CC-BY-SA', 'License');
ok(!$meta->{pages}, 'Pages');
ok(!$meta->{A_file_edition_statement}, 'File Statement');
ok(!$meta->{A_bibl_edition_statement}, 'Bibl Statement');
is($meta->{A_reference} . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($meta->{S_language}, 'de', 'Language');

is($meta->{A_externalLink}, 'data:application/x.korap-link;title=Wikipedia,http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz', 'link');

# Corpus-level metadata
is($meta->{T_corpus_title}, 'Wikipedia', 'Correct Corpus title');
ok(!$meta->{T_corpus_sub_title}, 'Correct Corpus sub title');
ok(!$meta->{T_corpus_author}, 'Correct Corpus author');
is($meta->{A_corpus_editor}, 'wikipedia.org', 'Correct Corpus editor');

# Document-level metadata
is($meta->{T_doc_title}, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$meta->{T_doc_sub_title}, 'Correct Doc sub title');
ok(!$meta->{T_doc_author}, 'Correct Doc author');
ok(!$meta->{A_doc_editor}, 'Correct doc editor');
# Tokenization
use_ok('KorAP::XML::Tokenizer');

my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization based on the OpenNLP token stream
my $tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

my $output = decode_json( $tokens->to_json );

# NOTE(review): substr() takes up to 100 characters but the expected
# string is shorter — confirm the primary data really ends here.
is(substr($output->{data}->{text}, 0, 100), '{{War Löschkandidat|6. Juli 2007|(erl., bleibt)}}', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.03', 'version');
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[4], 's:{War', 'data');

# The serialized JSON output carries the same metadata under
# camelCase field names
is($output->{textSigle}, 'WDD11/G27/38989', 'Correct text sigle');
is($output->{docSigle}, 'WDD11/G27', 'Correct document sigle');
is($output->{corpusSigle}, 'WDD11', 'Correct corpus sigle');

is($output->{title}, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$output->{subTitle}, 'No SubTitle');
is($output->{author}, '€pa, u.a.', 'Author');
is($output->{editor}, 'wikipedia.org', 'Editor');

is($output->{pubPlace}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($output->{publisher}, 'Wikipedia', 'Publisher');
is($output->{textType}, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$output->{textTypeArt}, 'Correct Text Type Art');
ok(!$output->{textTypeRef}, 'Correct Text Type Ref');
ok(!$output->{textDomain}, 'Correct Text Domain');
is($output->{creationDate}, '20070707', 'Creation date');
is($output->{availability}, 'CC-BY-SA', 'License');
ok(!$output->{pages}, 'Pages');
ok(!$output->{fileEditionStatement}, 'File Statement');
ok(!$output->{biblEditionStatement}, 'Bibl Statement');
is($output->{reference} . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Wikipedia', 'Correct Corpus title');
ok(!$output->{corpusSubTitle}, 'Correct Corpus sub title');
ok(!$output->{corpusAuthor}, 'Correct Corpus author');
is($output->{corpusEditor}, 'wikipedia.org', 'Correct Corpus editor');

is($output->{docTitle}, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$output->{docSubTitle}, 'Correct Doc sub title');
ok(!$output->{docAuthor}, 'Correct Doc author');
ok(!$output->{docEditor}, 'Correct doc editor');

## Base
# Each add() call merges an annotation layer into the token stream;
# afterwards the foundries/layerInfos strings must reflect it.
$tokens->add('Base', 'Sentences');

$tokens->add('Base', 'Paragraphs');

$output = decode_json( $tokens->to_json );

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:\{War/, 'data');
like($first_token, qr/_0\$<i>1<i>5/, 'data');


## OpenNLP
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');


$tokens->add('OpenNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');


## Treetagger
$tokens->add('TreeTagger', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');

$tokens->add('TreeTagger', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

## CoreNLP
# These annotation layers are not present in the test corpus; add()
# is expected to warn and leave foundries/layerInfos untouched, so
# warnings are locally silenced.
{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'NamedEntities');
};
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');


{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Sentences');
};
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Morpho');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');

{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Constituency');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');

## Glemm
{
  local $SIG{__WARN__} = sub {};
  $tokens->add('Glemm', 'Morpho');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');

## Connexor
$tokens->add('Connexor', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');

$tokens->add('Connexor', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');

$tokens->add('Connexor', 'Phrase');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');

$tokens->add('Connexor', 'Syntax');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');

## Mate
$tokens->add('Mate', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');

# diag "No test for mate dependency";

## XIP
$tokens->add('XIP', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');

$tokens->add('XIP', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');

$tokens->add('XIP', 'Constituency');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');

# diag "No test for xip dependency";

# Second document: WDD15/A79/83946
$path = catdir(dirname(__FILE__), 'corpus','WDD15','A79','83946');

ok($doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

is($doc->text_sigle, 'WDD15/A79/83946', 'Correct text sigle');
is($doc->doc_sigle, 'WDD15/A79', 'Correct document sigle');
is($doc->corpus_sigle, 'WDD15', 'Correct corpus sigle');

$meta = $doc->meta;
is($meta->{A_externalLink}, 'data:application/x.korap-link;title=Wikipedia,http://de.wikipedia.org/wiki/Diskussion:Arteria_interossea_communis', 'link');

# Get tokenization
$tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');

# Initialize log4perl object so the expected parse failure below is
# logged instead of silently discarded
Log::Log4perl->init({
  'log4perl.rootLogger' => 'DEBUG, STDERR',
  'log4perl.appender.STDERR' => 'Log::Log4perl::Appender::ScreenColoredLevels',
  'log4perl.appender.STDERR.layout' => 'PatternLayout',
  'log4perl.appender.STDERR.layout.ConversionPattern' => '[%r] %F %L %c - %m%n'
});

# This document is expected NOT to be tokenizable; the original
# description ('Token parsing is fine') contradicted the negated check.
ok(!$tokens->parse, 'Token parsing fails as expected');


done_testing;
__END__