#!/usr/bin/env perl
# source ~/perl5/perlbrew/etc/bashrc
# perlbrew switch perl-blead@korap
use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

use Benchmark qw/:hireswallclock/;

my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::Document');

# WDD/G27/38989
my $path = catdir(dirname(__FILE__), '../corpus/WDD/G27/38989');

ok(my $doc = KorAP::Document->new( path => $path . '/' ), 'Load KorAP::Document');
ok($doc->parse, 'Parse document');

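# Check the metadata parsed from the text, document and corpus headers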
is($doc->text_sigle, 'WDD11_G27.38989', 'Correct text sigle');
is($doc->doc_sigle, 'WDD11_G27', 'Correct document sigle');
is($doc->corpus_sigle, 'WDD11', 'Correct corpus sigle');

is($doc->title, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$doc->sub_title, 'No SubTitle');
is($doc->author, '€pa, u.a.', 'Author');
ok(!$doc->editor, 'Editor');

is($doc->pub_place, 'URL:http://de.wikipedia.org', 'PubPlace');
is($doc->publisher, 'Wikipedia', 'Publisher');
is($doc->text_type, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$doc->text_type_art, 'Correct Text Type Art');
ok(!$doc->text_type_ref, 'Correct Text Type Ref');
ok(!$doc->text_domain, 'Correct Text Domain');
is($doc->creation_date, '20070707', 'Creation date');
is($doc->license, 'CC-BY-SA', 'License');
ok(!$doc->pages, 'Pages');
ok(!$doc->file_edition_statement, 'File Statement');
ok(!$doc->bibl_edition_statement, 'Bibl Statement');
is($doc->reference . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($doc->language, 'de', 'Language');

is($doc->corpus_title, 'Wikipedia', 'Correct Corpus title');
ok(!$doc->corpus_sub_title, 'Correct Corpus sub title');
ok(!$doc->corpus_author, 'Correct Corpus author');
is($doc->corpus_editor, 'wikipedia.org', 'Correct Corpus editor');

is($doc->doc_title, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$doc->doc_sub_title, 'Correct Doc sub title');
ok(!$doc->doc_author, 'Correct Doc author');
ok(!$doc->doc_editor, 'Correct doc editor');

# Tokenization
use_ok('KorAP::Tokenizer');

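# Base the token stream on the OpenNLP tokenization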
my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization
my $tokens = KorAP::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');
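
# Serialize to JSON and check the token stream and the exported metadata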

my $output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 0, 100), '{{War Löschkandidat|6. Juli 2007|(erl., bleibt)}}', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.03', 'version');
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[3], 's:{War', 'data');

is($output->{textSigle}, 'WDD11_G27.38989', 'Correct text sigle');
is($output->{docSigle}, 'WDD11_G27', 'Correct document sigle');
is($output->{corpusSigle}, 'WDD11', 'Correct corpus sigle');

is($output->{title}, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$output->{subTitle}, 'No SubTitle');
is($output->{author}, '€pa, u.a.', 'Author');
ok(!$output->{editor}, 'Editor');

is($output->{pubPlace}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($output->{publisher}, 'Wikipedia', 'Publisher');
is($output->{textType}, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$output->{textTypeArt}, 'Correct Text Type Art');
ok(!$output->{textTypeRef}, 'Correct Text Type Ref');
ok(!$output->{textDomain}, 'Correct Text Domain');
is($output->{creationDate}, '20070707', 'Creation date');
is($output->{license}, 'CC-BY-SA', 'License');
ok(!$output->{pages}, 'Pages');
ok(!$output->{fileEditionStatement}, 'File Statement');
ok(!$output->{biblEditionStatement}, 'Bibl Statement');
is($output->{reference} . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Wikipedia', 'Correct Corpus title');
ok(!$output->{corpusSubTitle}, 'Correct Corpus sub title');
ok(!$output->{corpusAuthor}, 'Correct Corpus author');
is($output->{corpusEditor}, 'wikipedia.org', 'Correct Corpus editor');

is($output->{docTitle}, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$output->{docSubTitle}, 'Correct Doc sub title');
ok(!$output->{docAuthor}, 'Correct Doc author');
ok(!$output->{docEditor}, 'Correct doc editor');

## Base
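# Add base sentence and paragraph annotations;
# both are expected in the foundries string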
$tokens->add('Base', 'Sentences');

$tokens->add('Base', 'Paragraphs');

$output = decode_json( $tokens->to_json );

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:{War/, 'data');
like($first_token, qr/_0\$<i>1<i>5/, 'data');


## OpenNLP
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');


$tokens->add('OpenNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');


## Treetagger
$tokens->add('TreeTagger', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');

$tokens->add('TreeTagger', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

## CoreNLP
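# CoreNLP annotations are not expected for this document:
# adding them should only warn (suppressed below) and must not
# change foundries or layerInfos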
{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'NamedEntities');
};
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');


{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Sentences');
};
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Morpho');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');

{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Constituency');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');

## Glemm
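# Glemm lemma annotations are likewise not expected and must not be added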
{
  local $SIG{__WARN__} = sub {};
  $tokens->add('Glemm', 'Morpho');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');

## Connexor
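# Connexor, Mate and XIP annotations are available for this document
# and should register their respective foundries and layers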
$tokens->add('Connexor', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');

$tokens->add('Connexor', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');

$tokens->add('Connexor', 'Phrase');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');

$tokens->add('Connexor', 'Syntax');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');

## Mate
$tokens->add('Mate', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');

# diag "No test for mate dependency";

## XIP
$tokens->add('XIP', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');

$tokens->add('XIP', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');

$tokens->add('XIP', 'Constituency');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');

# diag "No test for xip dependency";

done_testing;
__END__