# blob: ae33e8d1fbee9a08d0a5e8f431e36196aaf88074
# Test script for KorAP::Document and KorAP::Tokenizer
# using the GOE/AGA/03828 sample text.
use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

# High-resolution wall-clock benchmarking of the whole run
# (result printed at the end of the script, currently disabled).
use Benchmark qw/:hireswallclock/;

my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

# Module under test: primary document handling.
use_ok('KorAP::Document');

# Test document: GOE/AGA/03828 from the bundled test corpus.
my $path = catdir(dirname(__FILE__), '../corpus/GOE/AGA/03828');
# my $path = '/home/ndiewald/Repositories/korap/KorAP-sandbox/KorAP-lucene-indexer/t/GOE/AGA/03828';

# The trailing slash is part of the expected path format.
ok(my $doc = KorAP::Document->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

# Sigle (corpus/document/text identifier) checks.
is($doc->text_sigle, 'GOE_AGA.03828', 'Correct text sigle');
is($doc->doc_sigle, 'GOE_AGA', 'Correct document sigle');
is($doc->corpus_sigle, 'GOE', 'Correct corpus sigle');

# Text-level metadata as parsed from the header.
is($doc->title, 'Autobiographische Einzelheiten', 'Title');
is($doc->pub_place, 'München', 'PubPlace');
is($doc->pub_date, '19820000', 'Creation Date');
ok(!$doc->sub_title, 'SubTitle');
is($doc->author, 'Goethe, Johann Wolfgang von', 'Author');

is($doc->publisher, 'Verlag C. H. Beck', 'Publisher');
ok(!$doc->editor, 'Publisher');
is($doc->text_type, 'Autobiographie', 'Correct Text Type');
ok(!$doc->text_type_art, 'Correct Text Type Art');
ok(!$doc->text_type_ref, 'Correct Text Type Ref');
ok(!$doc->text_column, 'Correct Text Column');
ok(!$doc->text_domain, 'Correct Text Domain');
is($doc->creation_date, '18200000', 'Creation Date');
is($doc->license, 'QAO-NC', 'License');
is($doc->pages, '529-547', 'Pages');
ok(!$doc->file_edition_statement, 'File Ed Statement');
ok(!$doc->bibl_edition_statement, 'Bibl Ed Statement');
# Full bibliographic reference string (note the trailing newline
# appended to match the heredoc).
is($doc->reference . "\n", <<'REF', 'Author');
Goethe, Johann Wolfgang von: Autobiographische Einzelheiten, (Geschrieben bis 1832), In: Goethe, Johann Wolfgang von: Goethes Werke, Bd. 10, Autobiographische Schriften II, Hrsg.: Trunz, Erich. München: Verlag C. H. Beck, 1982, S. 529-547
REF
is($doc->language, 'de', 'Language');

# Corpus-level and document-level metadata checks.
is($doc->corpus_title, 'Goethes Werke', 'Correct Corpus title');
ok(!$doc->corpus_sub_title, 'Correct Corpus Sub title');
is($doc->corpus_author, 'Goethe, Johann Wolfgang von', 'Correct Corpus author');
is($doc->corpus_editor, 'Trunz, Erich', 'Correct Corpus editor');

is($doc->doc_title, 'Goethe: Autobiographische Schriften II, (1817-1825, 1832)',
   'Correct Doc title');
ok(!$doc->doc_sub_title, 'Correct Doc Sub title');
ok(!$doc->doc_author, 'Correct Doc author');
ok(!$doc->doc_editor, 'Correct Doc editor');

# Tokenization
use_ok('KorAP::Tokenizer');

# Base tokenization is taken from the OpenNLP foundry, 'Tokens' layer.
my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization
my $tokens = KorAP::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

# Serialize and inspect the bare token stream (no annotations added yet).
my $output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 0, 100), 'Autobiographische einzelheiten Selbstschilderung (1) immer tätiger, nach innen und außen fortwirkend', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.03', 'version');
# Without added annotation layers, foundries and layerInfos are empty.
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[3], 's:Autobiographische', 'data');

# The header metadata must also be present in the serialized JSON,
# using camelCase field names.
is($output->{textSigle}, 'GOE_AGA.03828', 'Correct text sigle');
is($output->{docSigle}, 'GOE_AGA', 'Correct document sigle');
is($output->{corpusSigle}, 'GOE', 'Correct corpus sigle');

is($output->{author}, 'Goethe, Johann Wolfgang von', 'Author');
is($output->{pubPlace}, 'München', 'PubPlace');
is($output->{pubDate}, '19820000', 'Creation Date');
is($output->{title}, 'Autobiographische Einzelheiten', 'Title');
# Fields that are unset in the header must be absent from the JSON,
# not merely empty.
ok(!exists $output->{subTitle}, 'subTitle');

is($output->{publisher}, 'Verlag C. H. Beck', 'Publisher');
ok(!exists $output->{editor}, 'Editor');
is($output->{textType}, 'Autobiographie', 'Correct Text Type');
ok(!exists $output->{textTypeArt}, 'Correct Text Type');
ok(!exists $output->{textTypeRef}, 'Correct Text Type');
ok(!exists $output->{textColumn}, 'Correct Text Type');
ok(!exists $output->{textDomain}, 'Correct Text Type');
is($output->{creationDate}, '18200000', 'Creation Date');
is($output->{license}, 'QAO-NC', 'License');
is($output->{pages}, '529-547', 'Pages');
ok(!exists $output->{fileEditionStatement}, 'Correct Text Type');
ok(!exists $output->{biblEditionStatement}, 'Correct Text Type');
is($output->{reference} . "\n", <<'REF', 'Author');
Goethe, Johann Wolfgang von: Autobiographische Einzelheiten, (Geschrieben bis 1832), In: Goethe, Johann Wolfgang von: Goethes Werke, Bd. 10, Autobiographische Schriften II, Hrsg.: Trunz, Erich. München: Verlag C. H. Beck, 1982, S. 529-547
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Goethes Werke', 'Correct Corpus title');
ok(!exists $output->{corpusSubTitle}, 'Correct Text Type');
is($output->{corpusAuthor}, 'Goethe, Johann Wolfgang von', 'Correct Corpus title');
is($output->{corpusEditor}, 'Trunz, Erich', 'Editor');

is($output->{docTitle}, 'Goethe: Autobiographische Schriften II, (1817-1825, 1832)', 'Correct Corpus title');
ok(!exists $output->{docSubTitle}, 'Correct Text Type');
ok(!exists $output->{docAuthor}, 'Correct Text Type');
ok(!exists $output->{docEditor}, 'Correct Text Type');

## Base
# Add base sentence and paragraph spans and check that the foundry
# and layer information is reflected in the serialization.
$tokens->add('Base', 'Sentences');
$tokens->add('Base', 'Paragraphs');

$output = decode_json( $tokens->to_json );

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
# The first token of the stream should carry the surface term,
# its offsets, and the opening sentence/text spans.
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:Autobiographische/, 'data');
like($first_token, qr/_0\$<i>0<i>17/, 'data');
like($first_token, qr!<>:base/s:s\$<b>64<i>0<i>30<i>2<b>2!, 'data');
like($first_token, qr!<>:base\/s:t\$<b>64<i>0<i>35199<i>5226<b>0!, 'data');

## OpenNLP
# Sentence spans from the OpenNLP foundry.
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:opennlp/s:s\$<b>64<i>0<i>254<i>32!, 'data');

# Part-of-speech annotations from the OpenNLP foundry.
$tokens->add('OpenNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!opennlp/p:ADJA!, 'data');

## Treetagger
# Sentence spans from the TreeTagger foundry (serialized under 'tt').
$tokens->add('TreeTagger', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:tt/s:s\$<b>64<i>0<i>179<i>21<b>0!, 'data');

# Lemma and part-of-speech annotations; TreeTagger emits weighted
# alternatives (payload after the second <b>).
$tokens->add('TreeTagger', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');

is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!tt/l:autobiographisch\$<b>129<b>165!, 'data');
like($first_token, qr!tt/p:ADJA\$<b>129<b>165!, 'data');
like($first_token, qr!tt/l:Autobiographische\$<b>129<b>89!, 'data');
like($first_token, qr!tt/p:NN\$<b>129<b>89!, 'data');

## CoreNLP
# Named entities, sentences, morphology and constituency from CoreNLP.
$tokens->add('CoreNLP', 'NamedEntities');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences corenlp corenlp/namedentities opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans corenlp/ne=tokens opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

# diag "Missing test for NamedEntities";

# Problematic:
# diag Dumper $output->{data}->{stream}->[180];
# diag Dumper $output->{data}->{stream}->[341];

$tokens->add('CoreNLP', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences corenlp corenlp/namedentities corenlp/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans corenlp/ne=tokens corenlp/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:corenlp/s:s\$<b>64<i>0<i>254<i>32<b>0!, 'data');

$tokens->add('CoreNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!corenlp/p:ADJA!, 'data');

# Constituency spans: the first token opens several nested phrases
# (payload <b> value encodes the nesting depth).
$tokens->add('CoreNLP', 'Constituency');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:corenlp/c:NP\$<b>64<i>0<i>17<i>1<b>6!, 'data');
like($first_token, qr!<>:corenlp/c:CNP\$<b>64<i>0<i>17<i>1<b>7!, 'data');
like($first_token, qr!<>:corenlp/c:NP\$<b>64<i>0<i>17<i>1<b>8!, 'data');
like($first_token, qr!<>:corenlp/c:AP\$<b>64<i>0<i>17<i>1<b>9!, 'data');
like($first_token, qr!<>:corenlp/c:PP\$<b>64<i>0<i>50<i>3<b>4!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>50<i>3<b>5!, 'data');
like($first_token, qr!<>:corenlp/c:PP\$<b>64<i>0<i>58<i>5<b>2!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>58<i>5<b>3!, 'data');
like($first_token, qr!<>:corenlp/c:ROOT\$<b>64<i>0<i>254<i>32<b>0!, 'data');
like($first_token, qr!<>:corenlp/c:S\$<b>64<i>0<i>254<i>32<b>1!, 'data');

## Glemm
# Lemma variants from the Glemm foundry, including compound
# segmentation markers (__, +_, ++).
$tokens->add('Glemm', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!glemm/l:__autobiographisch!, 'data');
like($first_token, qr!glemm/l:\+_Auto!, 'data');
like($first_token, qr!glemm/l:\+_biographisch!, 'data');
like($first_token, qr!glemm/l:\+\+Biograph!, 'data');
like($first_token, qr!glemm/l:\+\+-isch!, 'data');

## Connexor
# Sentences, morphology, phrases and syntax from the Connexor
# foundry (serialized under 'cnx').
$tokens->add('Connexor', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:cnx/s:s\$<b>64<i>0<i>179<i>21<b>0!, 'data');

$tokens->add('Connexor', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!cnx/l:autobiografisch!, 'data');
like($first_token, qr!cnx/p:A!, 'data');

$tokens->add('Connexor', 'Phrase');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:cnx/c:np\$<b>64<i>0<i>30<i>2!, 'data');

$tokens->add('Connexor', 'Syntax');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!cnx/syn:\@PREMOD!, 'data');

## Mate
# Lemma, part-of-speech and morphological features from Mate.
$tokens->add('Mate', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!mate/l:autobiographisch!, 'data');
like($first_token, qr!mate/p:NN!, 'data');
like($first_token, qr!mate/m:case:nom!, 'data');
like($first_token, qr!mate/m:number:pl!, 'data');
like($first_token, qr!mate/m:gender:\*!, 'data');

# diag "No test for mate dependency";

## XIP
# Sentences, morphology and constituency from the XIP foundry.
$tokens->add('XIP', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/s:s\$<b>64<i>0<i>179<i>21!, 'data');

$tokens->add('XIP', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/s:s\$<b>64<i>0<i>179<i>21!, 'data');

$tokens->add('XIP', 'Constituency');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:xip/c:NP\$<b>64<i>0<i>17<i>1<b>1!, 'data');
like($first_token, qr!<>:xip/c:AP\$<b>64<i>0<i>17<i>1<b>2!, 'data');
like($first_token, qr!<>:xip/c:ADJ\$<b>64<i>0<i>17<i>1<b>3!, 'data');
like($first_token, qr!<>:xip/c:TOP\$<b>64<i>0<i>179<i>21<b>0!, 'data');

# diag "No test for xip dependency";

# Benchmark report (disabled by default).
# print timestr(timediff(Benchmark->new, $t));

done_testing;
__END__