use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;

use Benchmark qw/:hireswallclock/;

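# Record a high-resolution start time; elapsed time could later be
# reported with, e.g., timestr(timediff(Benchmark->new, $t))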
my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::XML::Krill');

# This will check LWC (dependency) annotations

# New WPD17 test data

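# The path mirrors the text sigle: WPD17 (corpus), 000 (document), 22053 (text)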
my $path = catdir(dirname(__FILE__), '../corpus/WPD17/000/22053');

ok(my $doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

is($doc->text_sigle, 'WPD17/000/22053', 'Correct text sigle');
is($doc->doc_sigle, 'WPD17/000', 'Correct document sigle');
is($doc->corpus_sigle, 'WPD17', 'Correct corpus sigle');

my $meta = $doc->meta;
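# Meta keys are prefixed with the index field type:
# T_ text, S_ string, D_ date, A_ attached data, K_ keywords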
is($meta->{T_title}, '0er', 'Title');
is($meta->{S_pub_place}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($meta->{D_pub_date}, '20170701', 'Publication Date');
ok(!$meta->{T_sub_title}, 'SubTitle');
is($meta->{T_author}, 'Rogi.Official, u.a.', 'Author');

is($meta->{A_publisher}, 'Wikipedia', 'Publisher');
is($meta->{A_editor}, 'wikipedia.org', 'Editor');
ok(!$meta->{translator}, 'Translator');
is($meta->{S_text_type}, 'Enzyklopädie', 'Correct Text Type');
is($meta->{S_text_type_art}, 'Enzyklopädie-Artikel', 'Correct Text Type Art');
ok(!$meta->{S_text_type_ref}, 'Correct Text Type Ref');
ok(!$meta->{S_text_column}, 'Correct Text Column');
ok(!$meta->{S_text_domain}, 'Correct Text Domain');
is($meta->{D_creation_date}, '20150511', 'Creation Date');

ok(!$meta->{pages}, 'Pages');
ok(!$meta->{A_file_edition_statement}, 'File Ed Statement');
ok(!$meta->{A_bibl_edition_statement}, 'Bibl Ed Statement');
is($meta->{A_reference}, '0er, In: Wikipedia - URL:http://de.wikipedia.org/wiki/0er: Wikipedia, 2017', 'Reference');
is($meta->{S_language}, 'de', 'Language');

is($meta->{T_corpus_title}, 'Wikipedia', 'Correct Corpus title');
ok(!$meta->{T_corpus_sub_title}, 'Correct Corpus Sub title');
ok(!$meta->{T_corpus_author}, 'Correct Corpus author');
is($meta->{A_corpus_editor}, 'wikipedia.org', 'Correct Corpus editor');

is($meta->{T_doc_title}, 'Wikipedia, Artikel mit Anfangszahl 0, Teil 00', 'Correct Doc title');
ok(!$meta->{T_doc_sub_title}, 'Correct Doc Sub title');
ok(!$meta->{T_doc_author}, 'Correct Doc author');
ok(!$meta->{A_doc_editor}, 'Correct Doc editor');

# Tokenization
use_ok('KorAP::XML::Tokenizer');

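# Use the base tokenization (foundry 'Base', layer 'Tokens')
# as the primary token stream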
my ($token_base_foundry, $token_base_layer) = (qw/Base Tokens/);

# Get tokenization
my $tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

my $output = decode_json( $tokens->to_json );

## Base
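# Add sentence and paragraph spans from the DeReKo structure foundry
# (serialized as dereko/s spans)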
$tokens->add('DeReKo', 'Structure', 'base_sentences_paragraphs');

# LWC
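# LWC contributes dependency relations (serialized as lwc/d rels)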
ok($tokens->add('LWC', 'Dependency'), 'Add LWC dependency annotations');

$output = $tokens->to_data;

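# In the serialized data, foundries and layerInfos are flat,
# space-separated strings listing the annotation sources and their layers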
is($output->{data}->{foundries},
   'dereko dereko/structure dereko/structure/base_sentences_paragraphs lwc lwc/dependency',
   'Foundries');

is($output->{data}->{layerInfos}, 'dereko/s=spans lwc/d=rels', 'layerInfos');

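# Each stream element holds the index terms of one token, e.g.
# s: (surface form), i: (lowercased surface), and >: for an outgoing
# relation whose payload ($...) encodes the relation target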
my $token = join('||', @{$output->{data}->{stream}->[7]});

like($token, qr!>:lwc/d:SVP\$<b>32<i>4!, 'data');
like($token, qr!i:statt!, 'data');

$token = join('||', @{$output->{data}->{stream}->[9]});

like($token, qr!>:lwc/d:--\$<b>33<i>64<i>76<i>8<i>11!, 'data');
like($token, qr!s:Januar!, 'data');


$path = catdir(dirname(__FILE__), '../corpus/WPD17/060/18486');

ok($doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

$meta = $doc->meta;

is($meta->{T_doc_title}, 'Wikipedia, Artikel mit Anfangszahl 0, Teil 60', 'Correct Doc title');
ok(!exists $meta->{translator}, 'No translator');

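# K_text_class holds the topic classification keywords of the text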
is($meta->{K_text_class}->[0], 'staat-gesellschaft', 'text class');
is($meta->{K_text_class}->[1], 'verbrechen', 'text class');

# Get tokenization
$tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

## Base
$tokens->add('DeReKo', 'Structure', 'base_sentences_paragraphs');

# LWC
ok($tokens->add('LWC', 'Dependency'), 'Add LWC dependency annotations');

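# Decoding the JSON serialization should yield the same structure
# as to_data above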
$output = decode_json( $tokens->to_json );

$token = join('||', @{$output->{data}->{stream}->[2]});

like($token, qr!>:lwc/d:SVP\$<b>32<i>1!, 'data');
like($token, qr!s:für!, 'data');


done_testing;
__END__