#!/usr/bin/env perl
# source ~/perl5/perlbrew/etc/bashrc
# perlbrew switch perl-blead@korap
use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

use Benchmark qw/:hireswallclock/;

my $t = Benchmark->new;
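# (High-resolution wall-clock timer; $t is not evaluated again below,
# so no timing is actually reported by this test.)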

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::Document');

# WDD/G27/38989
my $path = catdir(dirname(__FILE__), 'WDD/G27/38989');

ok(my $doc = KorAP::Document->new( path => $path . '/' ), 'Load KorAP::Document');
ok($doc->parse, 'Parse document');
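
# Check the metadata parsed from the text, document and corpus headers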

is($doc->text_sigle, 'WDD11_G27.38989', 'Correct text sigle');
is($doc->doc_sigle, 'WDD11_G27', 'Correct document sigle');
is($doc->corpus_sigle, 'WDD11', 'Correct corpus sigle');

is($doc->title, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$doc->sub_title, 'No SubTitle');
is($doc->author, '€pa, u.a.', 'Author');
is($doc->editor, 'wikipedia.org', 'Editor');

is($doc->pub_place, 'URL:http://de.wikipedia.org', 'PubPlace');
is($doc->publisher, 'Wikipedia', 'Publisher');
is($doc->text_type, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$doc->text_type_art, 'Correct Text Type Art');
ok(!$doc->text_type_ref, 'Correct Text Type Ref');
ok(!$doc->text_domain, 'Correct Text Domain');
is($doc->creation_date, '20070707', 'Creation date');
is($doc->license, 'CC-BY-SA', 'License');
ok(!$doc->pages, 'Pages');
ok(!$doc->file_edition_statement, 'File Statement');
ok(!$doc->bibl_edition_statement, 'Bibl Statement');
is($doc->reference . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($doc->language, 'de', 'Language');

is($doc->corpus_title, 'Wikipedia', 'Correct Corpus title');
ok(!$doc->corpus_sub_title, 'Correct Corpus sub title');
ok(!$doc->corpus_author, 'Correct Corpus author');
is($doc->corpus_editor, 'wikipedia.org', 'Correct Corpus editor');

is($doc->doc_title, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$doc->doc_sub_title, 'Correct Doc sub title');
ok(!$doc->doc_author, 'Correct Doc author');
ok(!$doc->doc_editor, 'Correct doc editor');

# Tokenization
use_ok('KorAP::Tokenizer');

my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization
my $tokens = KorAP::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

my $output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 0, 100), '{{War Löschkandidat|6. Juli 2007|(erl., bleibt)}}', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.02', 'version');
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[3], 's:{War', 'data');
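# Each element of data->{stream} is one token, represented as an array of
# annotation strings (e.g. the surface term 's:{War' checked above and, once
# further layers are added, offset annotations like '_0#1-5' checked below).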

is($output->{textSigle}, 'WDD11_G27.38989', 'Correct text sigle');
is($output->{docSigle}, 'WDD11_G27', 'Correct document sigle');
is($output->{corpusSigle}, 'WDD11', 'Correct corpus sigle');

is($output->{title}, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$output->{subTitle}, 'No SubTitle');
is($output->{author}, '€pa, u.a.', 'Author');
is($output->{editor}, 'wikipedia.org', 'Editor');

is($output->{pubPlace}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($output->{publisher}, 'Wikipedia', 'Publisher');
is($output->{textType}, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$output->{textTypeArt}, 'Correct Text Type Art');
ok(!$output->{textTypeRef}, 'Correct Text Type Ref');
ok(!$output->{textDomain}, 'Correct Text Domain');
is($output->{creationDate}, '20070707', 'Creation date');
is($output->{license}, 'CC-BY-SA', 'License');
ok(!$output->{pages}, 'Pages');
ok(!$output->{fileEditionStatement}, 'File Statement');
ok(!$output->{biblEditionStatement}, 'Bibl Statement');
is($output->{reference} . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Wikipedia', 'Correct Corpus title');
ok(!$output->{corpusSubTitle}, 'Correct Corpus sub title');
ok(!$output->{corpusAuthor}, 'Correct Corpus author');
is($output->{corpusEditor}, 'wikipedia.org', 'Correct Corpus editor');

is($output->{docTitle}, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$output->{docSubTitle}, 'Correct Doc sub title');
ok(!$output->{docAuthor}, 'Correct Doc author');
ok(!$output->{docEditor}, 'Correct doc editor');

## Base
$tokens->add('Base', 'Sentences');

$tokens->add('Base', 'Paragraphs');

$output = decode_json( $tokens->to_json );

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:\{War/, 'data');
like($first_token, qr/_0#1-5/, 'data');
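# The remaining tests add annotation layers one at a time via
# $tokens->add(<foundry>, <layer>), re-serialize the JSON, and check that
# the 'foundries' and 'layerInfos' strings grow accordingly.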


## OpenNLP
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');


$tokens->add('OpenNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');

## TreeTagger
$tokens->add('TreeTagger', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');
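# Note that layerInfos uses short foundry prefixes ('tt' for TreeTagger,
# later 'cnx' for Connexor), while the foundries string spells the names out.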

$tokens->add('TreeTagger', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

## CoreNLP
$tokens->add('CoreNLP', 'NamedEntities');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

$tokens->add('CoreNLP', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

$tokens->add('CoreNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');

$tokens->add('CoreNLP', 'Constituency');
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');
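# For this text the CoreNLP layers (and the Glemm layer below) do not show up
# in the output, presumably because the corresponding annotation files are
# missing; the unchanged foundries strings and the unlike() checks assert that
# adding them leaves foundries/layerInfos untouched.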

## Glemm
$tokens->add('Glemm', 'Morpho');
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');

## Connexor
$tokens->add('Connexor', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');

$tokens->add('Connexor', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');

$tokens->add('Connexor', 'Phrase');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');

$tokens->add('Connexor', 'Syntax');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');

## Mate
$tokens->add('Mate', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');

diag "No test for mate dependency";

## XIP
$tokens->add('XIP', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');

$tokens->add('XIP', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');


$tokens->add('XIP', 'Constituency');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');

diag "No test for xip dependency";


done_testing;
__END__