use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

# This is an integration test against real corpus data; allow skipping
# it in environments without the corpus (e.g. CI) via SKIP_REAL.
if ($ENV{SKIP_REAL}) {
  plan skip_all => 'Skip real tests';
};

use Benchmark qw/:hireswallclock/;

# Start time for the (commented-out) benchmark report at the end of the file.
my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::XML::Krill');
23# GOE/AGA/03828
24my $path = catdir(dirname(__FILE__), '../corpus/GOE/AGA/03828');
25# my $path = '/home/ndiewald/Repositories/korap/KorAP-sandbox/KorAP-lucene-indexer/t/GOE/AGA/03828';
26
Akrone4c2e412016-01-28 15:10:50 +010027ok(my $doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
Akron9c0488f2016-01-28 14:17:15 +010028ok($doc->parse, 'Parse document');
29
Akron1cd5b872016-03-22 00:23:46 +010030is($doc->text_sigle, 'GOE/AGA/03828', 'Correct text sigle');
31is($doc->doc_sigle, 'GOE/AGA', 'Correct document sigle');
Akron9c0488f2016-01-28 14:17:15 +010032is($doc->corpus_sigle, 'GOE', 'Correct corpus sigle');

# Parsed header metadata; key prefixes encode the index field type
# (T_ = text, S_ = string, D_ = date, A_ = attachment/stored).
my $meta = $doc->meta;
is($meta->{T_title}, 'Autobiographische Einzelheiten', 'Title');
is($meta->{S_pub_place}, 'München', 'PubPlace');
is($meta->{D_pub_date}, '19820000', 'Creation Date');
ok(!$meta->{T_sub_title}, 'SubTitle');
is($meta->{T_author}, 'Goethe, Johann Wolfgang von', 'Author');

is($meta->{A_publisher}, 'Verlag C. H. Beck', 'Publisher');
ok(!$meta->{A_editor}, 'Editor');
is($meta->{S_text_type}, 'Autobiographie', 'Correct Text Type');
ok(!$meta->{S_text_type_art}, 'Correct Text Type Art');
ok(!$meta->{S_text_type_ref}, 'Correct Text Type Ref');
ok(!$meta->{S_text_column}, 'Correct Text Column');
ok(!$meta->{S_text_domain}, 'Correct Text Domain');
is($meta->{D_creation_date}, '18200000', 'Creation Date');
is($meta->{S_availability}, 'QAO-NC', 'License');
is($meta->{A_src_pages}, '529-547', 'Pages');
ok(!$meta->{A_file_edition_statement}, 'File Ed Statement');
ok(!$meta->{A_bibl_edition_statement}, 'Bibl Ed Statement');
is($meta->{A_reference} . "\n", <<'REF', 'Reference');
Goethe, Johann Wolfgang von: Autobiographische Einzelheiten, (Geschrieben bis 1832), In: Goethe, Johann Wolfgang von: Goethes Werke, Bd. 10, Autobiographische Schriften II, Hrsg.: Trunz, Erich. München: Verlag C. H. Beck, 1982, S. 529-547
REF
is($meta->{S_language}, 'de', 'Language');


# Corpus-level metadata inherited by the text
is($meta->{T_corpus_title}, 'Goethes Werke', 'Correct Corpus title');
ok(!$meta->{T_corpus_sub_title}, 'Correct Corpus Sub title');
is($meta->{T_corpus_author}, 'Goethe, Johann Wolfgang von', 'Correct Corpus author');
is($meta->{A_corpus_editor}, 'Trunz, Erich', 'Correct Corpus editor');

# Document-level metadata inherited by the text
is($meta->{T_doc_title}, 'Goethe: Autobiographische Schriften II, (1817-1825, 1832)',
   'Correct Doc title');
ok(!$meta->{T_doc_sub_title}, 'Correct Doc Sub title');
ok(!$meta->{T_doc_author}, 'Correct Doc author');
ok(!$meta->{A_doc_editor}, 'Correct Doc editor');

# Tokenization
use_ok('KorAP::XML::Tokenizer');

my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization based on the OpenNLP token boundaries
my $tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

# Round-trip through the JSON serialization
my $output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 0, 100), 'Autobiographische einzelheiten Selbstschilderung (1) immer tätiger, nach innen und außen fortwirkend', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.03', 'version');
# No annotation layers have been added yet
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[4], 's:Autobiographische', 'data');

# Serialized metadata uses camelCase field names
is($output->{textSigle}, 'GOE/AGA/03828', 'Correct text sigle');
is($output->{docSigle}, 'GOE/AGA', 'Correct document sigle');
is($output->{corpusSigle}, 'GOE', 'Correct corpus sigle');

is($output->{author}, 'Goethe, Johann Wolfgang von', 'Author');
is($output->{pubPlace}, 'München', 'PubPlace');
is($output->{pubDate}, '19820000', 'Creation Date');
is($output->{title}, 'Autobiographische Einzelheiten', 'Title');
ok(!exists $output->{subTitle}, 'subTitle');

is($output->{publisher}, 'Verlag C. H. Beck', 'Publisher');
# NOTE(review): checks the internal key name, not a serialized 'editor'
# field — presumably intentional; verify against the serializer.
ok(!exists $output->{A_editor}, 'Editor');
is($output->{textType}, 'Autobiographie', 'Correct Text Type');
ok(!exists $output->{textTypeArt}, 'Correct Text Type');
ok(!exists $output->{textTypeRef}, 'Correct Text Type');
ok(!exists $output->{textColumn}, 'Correct Text Type');
ok(!exists $output->{textDomain}, 'Correct Text Type');
is($output->{creationDate}, '18200000', 'Creation Date');
is($output->{availability}, 'QAO-NC', 'License');
is($output->{srcPages}, '529-547', 'Pages');
ok(!exists $output->{fileEditionStatement}, 'Correct Text Type');
ok(!exists $output->{biblEditionStatement}, 'Correct Text Type');
is($output->{reference} . "\n", <<'REF', 'Reference');
Goethe, Johann Wolfgang von: Autobiographische Einzelheiten, (Geschrieben bis 1832), In: Goethe, Johann Wolfgang von: Goethes Werke, Bd. 10, Autobiographische Schriften II, Hrsg.: Trunz, Erich. München: Verlag C. H. Beck, 1982, S. 529-547
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Goethes Werke', 'Correct Corpus title');
ok(!exists $output->{corpusSubTitle}, 'Correct Text Type');
is($output->{corpusAuthor}, 'Goethe, Johann Wolfgang von', 'Correct Corpus title');
is($output->{corpusEditor}, 'Trunz, Erich', 'Editor');

is($output->{docTitle}, 'Goethe: Autobiographische Schriften II, (1817-1825, 1832)', 'Correct Corpus title');
ok(!exists $output->{docSubTitle}, 'Correct Text Type');
ok(!exists $output->{docAuthor}, 'Correct Text Type');
ok(!exists $output->{docEditor}, 'Correct Text Type');

## Base
# Add base foundry annotations and check foundry/layer bookkeeping
$tokens->add('Base', 'Sentences');
$tokens->add('Base', 'Paragraphs');

$output = $tokens->to_data;

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
# Inspect the serialized annotation stream of the first token
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:Autobiographische/, 'data');
like($first_token, qr/_0\$<i>0<i>17/, 'data');
like($first_token, qr!<>:base/s:s\$<b>64<i>0<i>30<i>2<b>2!, 'data');
like($first_token, qr!<>:base\/s:t\$<b>64<i>0<i>35199<i>5227<b>0!, 'data');

## OpenNLP
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:opennlp/s:s\$<b>64<i>0<i>254<i>32!, 'data');

$tokens->add('OpenNLP', 'Morpho');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!opennlp/p:ADJA!, 'data');

## Treetagger
$tokens->add('TreeTagger', 'Sentences');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:tt/s:s\$<b>64<i>0<i>179<i>21<b>0!, 'data');

$tokens->add('TreeTagger', 'Morpho');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');

is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');
# TreeTagger emits ambiguous lemma/POS readings with certainty payloads
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!tt/l:autobiographisch!, 'data');
like($first_token, qr!tt/p:ADJA\$<b>129<b>165!, 'data');
like($first_token, qr!tt/l:Autobiographische!, 'data');
like($first_token, qr!tt/p:NN\$<b>129<b>89!, 'data');

## CoreNLP
$tokens->add('CoreNLP', 'NamedEntities');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences corenlp corenlp/namedentities opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans corenlp/ne=tokens opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

# diag "Missing test for NamedEntities";

# Problematic:
# diag Dumper $output->{data}->{stream}->[180];
# diag Dumper $output->{data}->{stream}->[341];

$tokens->add('CoreNLP', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences corenlp corenlp/namedentities corenlp/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans corenlp/ne=tokens corenlp/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:corenlp/s:s\$<b>64<i>0<i>254<i>32<b>0!, 'data');

$tokens->add('CoreNLP', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!corenlp/p:ADJA!, 'data');

$tokens->add('CoreNLP', 'Constituency');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');
# The first token opens a stack of nested constituent spans (depth in <b>)
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:corenlp/c:NP\$<b>64<i>0<i>17<i>1<b>6!, 'data');
like($first_token, qr!<>:corenlp/c:CNP\$<b>64<i>0<i>17<i>1<b>7!, 'data');
like($first_token, qr!<>:corenlp/c:NP\$<b>64<i>0<i>17<i>1<b>8!, 'data');
like($first_token, qr!<>:corenlp/c:AP\$<b>64<i>0<i>17<i>1<b>9!, 'data');
like($first_token, qr!<>:corenlp/c:PP\$<b>64<i>0<i>50<i>3<b>4!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>50<i>3<b>5!, 'data');
like($first_token, qr!<>:corenlp/c:PP\$<b>64<i>0<i>58<i>5<b>2!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>58<i>5<b>3!, 'data');
like($first_token, qr!<>:corenlp/c:ROOT\$<b>64<i>0<i>254<i>32<b>0!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>254<i>32<b>1!, 'data');

## Glemm
$tokens->add('Glemm', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');
# Glemm lemmas are prefixed: __ whole lemma, +_ first-level parts, ++ deeper parts
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!glemm/l:__autobiographisch!, 'data');
like($first_token, qr!glemm/l:\+_Auto!, 'data');
like($first_token, qr!glemm/l:\+_biographisch!, 'data');
like($first_token, qr!glemm/l:\+\+Biograph!, 'data');
like($first_token, qr!glemm/l:\+\+-isch!, 'data');

## Connexor
$tokens->add('Connexor', 'Sentences');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:cnx/s:s\$<b>64<i>0<i>179<i>21<b>0!, 'data');

$tokens->add('Connexor', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!cnx/l:autobiografisch!, 'data');
like($first_token, qr!cnx/p:A!, 'data');

$tokens->add('Connexor', 'Phrase');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:cnx/c:np\$<b>64<i>0<i>30<i>2!, 'data');

$tokens->add('Connexor', 'Syntax');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!cnx/syn:\@PREMOD!, 'data');

## Mate
$tokens->add('Mate', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!mate/l:autobiographisch!, 'data');
like($first_token, qr!mate/p:NN!, 'data');
like($first_token, qr!mate/m:case:nom!, 'data');
like($first_token, qr!mate/m:number:pl!, 'data');
like($first_token, qr!mate/m:gender:\*!, 'data');

## XIP
$tokens->add('XIP', 'Sentences');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/s:s\$<b>64<i>0<i>179<i>21!, 'data');

$tokens->add('XIP', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/s:s\$<b>64<i>0<i>179<i>21!, 'data');

$tokens->add('XIP', 'Constituency');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/c:NP\$<b>64<i>0<i>17<i>1<b>1!, 'data');
like($first_token, qr!<>:xip/c:AP\$<b>64<i>0<i>17<i>1<b>2!, 'data');
like($first_token, qr!<>:xip/c:ADJ\$<b>64<i>0<i>17<i>1<b>3!, 'data');
like($first_token, qr!<>:xip/c:TOP\$<b>64<i>0<i>179<i>21<b>0!, 'data');

# diag "No test for mate dependency";
# diag "No test for xip dependency";

# diag timestr(timediff(Benchmark->new, $t));

done_testing;
__END__