blob: 03af7cc3c9d768f302f022c9557fa396af6dcd9e [file] [log] [blame]
use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

use Benchmark qw/:hireswallclock/;

# Wallclock timer for the (commented-out) benchmark diagnostic at the end
# of this script.
my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::XML::Krill');

# Test document: GOE/AGA/03828 from the bundled test corpus
my $path = catdir(dirname(__FILE__), '../corpus/GOE/AGA/03828');
# my $path = '/home/ndiewald/Repositories/korap/KorAP-sandbox/KorAP-lucene-indexer/t/GOE/AGA/03828';

# Load and parse the primary document (the constructor expects a
# directory path with a trailing slash).
ok(my $doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');
25
# Sigles are derived from the corpus/doc/text directory structure.
is($doc->text_sigle, 'GOE/AGA/03828', 'Correct text sigle');
is($doc->doc_sigle, 'GOE/AGA', 'Correct document sigle');
is($doc->corpus_sigle, 'GOE', 'Correct corpus sigle');

# Metadata fields parsed from the headers; key prefixes mark the field
# type (T_=text, S_=string, D_=date, A_=attachment -- presumably, TODO confirm
# against KorAP::XML::Meta).
my $meta = $doc->meta;
is($meta->{T_title}, 'Autobiographische Einzelheiten', 'Title');
is($meta->{S_pub_place}, 'München', 'PubPlace');
is($meta->{D_pub_date}, '19820000', 'Creation Date');
ok(!$meta->{T_sub_title}, 'SubTitle');
is($meta->{T_author}, 'Goethe, Johann Wolfgang von', 'Author');

is($meta->{A_publisher}, 'Verlag C. H. Beck', 'Publisher');
ok(!$meta->{A_editor}, 'Publisher');
is($meta->{S_text_type}, 'Autobiographie', 'Correct Text Type');
ok(!$meta->{S_text_type_art}, 'Correct Text Type Art');
ok(!$meta->{S_text_type_ref}, 'Correct Text Type Ref');
ok(!$meta->{S_text_column}, 'Correct Text Column');
ok(!$meta->{S_text_domain}, 'Correct Text Domain');
is($meta->{D_creation_date}, '18200000', 'Creation Date');
is($meta->{S_availability}, 'QAO-NC', 'License');
is($meta->{A_src_pages}, '529-547', 'Pages');
ok(!$meta->{A_file_edition_statement}, 'File Ed Statement');
ok(!$meta->{A_bibl_edition_statement}, 'Bibl Ed Statement');
# The reference string has no trailing newline; append one so it can be
# compared against the heredoc (which always ends in a newline).
is($meta->{A_reference} . "\n", <<'REF', 'Author');
Goethe, Johann Wolfgang von: Autobiographische Einzelheiten, (Geschrieben bis 1832), In: Goethe, Johann Wolfgang von: Goethes Werke, Bd. 10, Autobiographische Schriften II, Hrsg.: Trunz, Erich. München: Verlag C. H. Beck, 1982, S. 529-547
REF
is($meta->{S_language}, 'de', 'Language');


# Corpus-level metadata
is($meta->{T_corpus_title}, 'Goethes Werke', 'Correct Corpus title');
ok(!$meta->{T_corpus_sub_title}, 'Correct Corpus Sub title');
is($meta->{T_corpus_author}, 'Goethe, Johann Wolfgang von', 'Correct Corpus author');
is($meta->{A_corpus_editor}, 'Trunz, Erich', 'Correct Corpus editor');

# Document-level metadata
is($meta->{T_doc_title}, 'Goethe: Autobiographische Schriften II, (1817-1825, 1832)',
   'Correct Doc title');
ok(!$meta->{T_doc_sub_title}, 'Correct Doc Sub title');
ok(!$meta->{T_doc_author}, 'Correct Doc author');
ok(!$meta->{A_doc_editor}, 'Correct Doc editor');
Akron9c0488f2016-01-28 14:17:15 +010065
# Tokenization
use_ok('KorAP::XML::Tokenizer');

my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization
my $tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

my $output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 0, 100), 'Autobiographische einzelheiten Selbstschilderung (1) immer tätiger, nach innen und außen fortwirkend', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.03', 'version');
# No annotation layers added yet, so foundries and layerInfos are empty.
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[4], 's:Autobiographische', 'data');
Akron9c0488f2016-01-28 14:17:15 +010091
# The serialized output repeats the document metadata with camelCase keys.
is($output->{textSigle}, 'GOE/AGA/03828', 'Correct text sigle');
is($output->{docSigle}, 'GOE/AGA', 'Correct document sigle');
is($output->{corpusSigle}, 'GOE', 'Correct corpus sigle');

is($output->{author}, 'Goethe, Johann Wolfgang von', 'Author');
is($output->{pubPlace}, 'München', 'PubPlace');
is($output->{pubDate}, '19820000', 'Creation Date');
is($output->{title}, 'Autobiographische Einzelheiten', 'Title');
# Fields that are unset in the source must not appear in the output at all.
ok(!exists $output->{subTitle}, 'subTitle');

is($output->{publisher}, 'Verlag C. H. Beck', 'Publisher');
ok(!exists $output->{A_editor}, 'Editor');
is($output->{textType}, 'Autobiographie', 'Correct Text Type');
ok(!exists $output->{textTypeArt}, 'Correct Text Type');
ok(!exists $output->{textTypeRef}, 'Correct Text Type');
ok(!exists $output->{textColumn}, 'Correct Text Type');
ok(!exists $output->{textDomain}, 'Correct Text Type');
is($output->{creationDate}, '18200000', 'Creation Date');
is($output->{availability}, 'QAO-NC', 'License');
is($output->{srcPages}, '529-547', 'Pages');
ok(!exists $output->{fileEditionStatement}, 'Correct Text Type');
ok(!exists $output->{biblEditionStatement}, 'Correct Text Type');
# Append "\n" so the value compares equal to the heredoc below.
is($output->{reference} . "\n", <<'REF', 'Author');
Goethe, Johann Wolfgang von: Autobiographische Einzelheiten, (Geschrieben bis 1832), In: Goethe, Johann Wolfgang von: Goethes Werke, Bd. 10, Autobiographische Schriften II, Hrsg.: Trunz, Erich. München: Verlag C. H. Beck, 1982, S. 529-547
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Goethes Werke', 'Correct Corpus title');
ok(!exists $output->{corpusSubTitle}, 'Correct Text Type');
is($output->{corpusAuthor}, 'Goethe, Johann Wolfgang von', 'Correct Corpus title');
is($output->{corpusEditor}, 'Trunz, Erich', 'Editor');

is($output->{docTitle}, 'Goethe: Autobiographische Schriften II, (1817-1825, 1832)', 'Correct Corpus title');
ok(!exists $output->{docSubTitle}, 'Correct Text Type');
ok(!exists $output->{docAuthor}, 'Correct Text Type');
ok(!exists $output->{docEditor}, 'Correct Text Type');
128
## Base
# Add the base foundry annotations and check that foundries/layerInfos
# are updated and that span terms show up in the token stream.
$tokens->add('Base', 'Sentences');
$tokens->add('Base', 'Paragraphs');

$output = $tokens->to_data;

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
# Join all terms of the first token position for easy regex matching.
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:Autobiographische/, 'data');
like($first_token, qr/_0\$<i>0<i>17/, 'data');
like($first_token, qr!<>:base/s:s\$<b>64<i>0<i>30<i>2<b>2!, 'data');
like($first_token, qr!<>:base\/s:t\$<b>64<i>0<i>35199<i>5227<b>0!, 'data');
Akron9c0488f2016-01-28 14:17:15 +0100142
## OpenNLP
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
# Foundries accumulate and are listed alphabetically.
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:opennlp/s:s\$<b>64<i>0<i>254<i>32!, 'data');

$tokens->add('OpenNLP', 'Morpho');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!opennlp/p:ADJA!, 'data');
162
## Treetagger
$tokens->add('TreeTagger', 'Sentences');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:tt/s:s\$<b>64<i>0<i>179<i>21<b>0!, 'data');

$tokens->add('TreeTagger', 'Morpho');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');

is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');
# TreeTagger emits alternative lemma/POS readings with certainty payloads
# (<b>129<b>...), so both readings are expected on the first token.
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!tt/l:autobiographisch!, 'data');
like($first_token, qr!tt/p:ADJA\$<b>129<b>165!, 'data');
like($first_token, qr!tt/l:Autobiographische!, 'data');
like($first_token, qr!tt/p:NN\$<b>129<b>89!, 'data');
185
## CoreNLP
$tokens->add('CoreNLP', 'NamedEntities');
$output = $tokens->to_data;
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences corenlp corenlp/namedentities opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans corenlp/ne=tokens opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

# diag "Missing test for NamedEntities";

# Problematic:
# diag Dumper $output->{data}->{stream}->[180];
# diag Dumper $output->{data}->{stream}->[341];

$tokens->add('CoreNLP', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences corenlp corenlp/namedentities corenlp/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans corenlp/ne=tokens corenlp/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:corenlp/s:s\$<b>64<i>0<i>254<i>32<b>0!, 'data');

$tokens->add('CoreNLP', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!corenlp/p:ADJA!, 'data');

$tokens->add('CoreNLP', 'Constituency');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');
# The first token opens a whole stack of constituency spans, from the
# innermost NP up to the sentence ROOT; the trailing <b> payload encodes
# the nesting depth.
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:corenlp/c:NP\$<b>64<i>0<i>17<i>1<b>6!, 'data');
like($first_token, qr!<>:corenlp/c:CNP\$<b>64<i>0<i>17<i>1<b>7!, 'data');
like($first_token, qr!<>:corenlp/c:NP\$<b>64<i>0<i>17<i>1<b>8!, 'data');
like($first_token, qr!<>:corenlp/c:AP\$<b>64<i>0<i>17<i>1<b>9!, 'data');
like($first_token, qr!<>:corenlp/c:PP\$<b>64<i>0<i>50<i>3<b>4!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>50<i>3<b>5!, 'data');
like($first_token, qr!<>:corenlp/c:PP\$<b>64<i>0<i>58<i>5<b>2!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>58<i>5<b>3!, 'data');
like($first_token, qr!<>:corenlp/c:ROOT\$<b>64<i>0<i>254<i>32<b>0!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>254<i>32<b>1!, 'data');
231
## Glemm
$tokens->add('Glemm', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');
# Glemm lemmata carry segmentation markers (__ = full lemma,
# +_ = components, ++ = subcomponents).
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!glemm/l:__autobiographisch!, 'data');
like($first_token, qr!glemm/l:\+_Auto!, 'data');
like($first_token, qr!glemm/l:\+_biographisch!, 'data');
like($first_token, qr!glemm/l:\+\+Biograph!, 'data');
like($first_token, qr!glemm/l:\+\+-isch!, 'data');
243
## Connexor
$tokens->add('Connexor', 'Sentences');
$output = $tokens->to_data;
# Note: the Connexor foundry is abbreviated as "cnx" in layerInfos.
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:cnx/s:s\$<b>64<i>0<i>179<i>21<b>0!, 'data');

$tokens->add('Connexor', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!cnx/l:autobiografisch!, 'data');
like($first_token, qr!cnx/p:A!, 'data');

$tokens->add('Connexor', 'Phrase');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:cnx/c:np\$<b>64<i>0<i>30<i>2!, 'data');

$tokens->add('Connexor', 'Syntax');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!cnx/syn:\@PREMOD!, 'data');
275
## Mate
$tokens->add('Mate', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');
# Mate adds lemma, POS and morphosyntactic features (case/number/gender;
# "*" marks an underspecified value).
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!mate/l:autobiographisch!, 'data');
like($first_token, qr!mate/p:NN!, 'data');
like($first_token, qr!mate/m:case:nom!, 'data');
like($first_token, qr!mate/m:number:pl!, 'data');
like($first_token, qr!mate/m:gender:\*!, 'data');
289
## XIP
$tokens->add('XIP', 'Sentences');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/s:s\$<b>64<i>0<i>179<i>21!, 'data');

$tokens->add('XIP', 'Morpho');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/s:s\$<b>64<i>0<i>179<i>21!, 'data');

$tokens->add('XIP', 'Constituency');
$output = $tokens->to_data;
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/c:NP\$<b>64<i>0<i>17<i>1<b>1!, 'data');
like($first_token, qr!<>:xip/c:AP\$<b>64<i>0<i>17<i>1<b>2!, 'data');
like($first_token, qr!<>:xip/c:ADJ\$<b>64<i>0<i>17<i>1<b>3!, 'data');
like($first_token, qr!<>:xip/c:TOP\$<b>64<i>0<i>179<i>21<b>0!, 'data');
315
# diag "No test for mate dependency";
# diag "No test for xip dependency";

# diag timestr(timediff(Benchmark->new, $t));

done_testing;
__END__