use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;
use Log::Log4perl;
use utf8;

use Benchmark qw/:hireswallclock/;

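# Start time, in case the conversion should be benchmarked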
my $t = Benchmark->new;

# Initialize log4perl object
#Log::Log4perl->init({
#  'log4perl.rootLogger' => 'TRACE, STDERR',
#  'log4perl.appender.STDERR' => 'Log::Log4perl::Appender::ScreenColoredLevels',
#  'log4perl.appender.STDERR.layout' => 'PatternLayout',
#  'log4perl.appender.STDERR.layout.ConversionPattern' => '[%r] %F %L %c - %m%n'
#});


use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::XML::Krill');

my $path = catdir(dirname(__FILE__), '..', 'corpus', 'AGD-scrambled', 'DOC', '00001');

ok(my $doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

is($doc->text_sigle, 'AGD/DOC/00001', 'Correct text sigle');
is($doc->doc_sigle, 'AGD/DOC', 'Correct document sigle');
is($doc->corpus_sigle, 'AGD', 'Correct corpus sigle');

my $meta = $doc->meta;
is($meta->{T_title}, 'FOLK_E_00321_SE_01_T_01_DF_01', 'Title');
is($meta->{D_creation_date}, '20181112', 'Creation date');

is($meta->{A_externalLink}, 'data:application/x.korap-link;title=DGD,'.
     'https://dgd.ids-mannheim.de/DGD2Web/ExternalAccessServlet?command=displayData'.
     '&id=FOLK_E_00321_SE_01_T_01', 'External link');

# Tokenization
use_ok('KorAP::XML::Tokenizer');

my ($token_base_foundry, $token_base_layer) = (qw/DGD Annot/);

# Get tokenization
my $tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens',
  non_verbal_tokens => 1
);

ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

my $output = decode_json( $tokens->to_json );

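# The scrambled test corpus replaces the original transcript text,
# so only the scrambled surface is checked here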
is(substr($output->{data}->{text}, 0, 100),
   '+++++++++ ku sqn alxv a pwm ▮ xnj nq qtl ohmdgjqp ▮ ▮ ▮ ▮ ▮ fi ▮ sna ▮ alxv hn ▮ zjc ahyx ftwbramn l',
   'Primary Data');

is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'dgd#annot', 'tokenSource');

is($output->{version}, '0.03', 'version');
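
# No annotation layer has been added yet, so foundries and layerInfos are empty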
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[4], 's:ku', 'data');
is($output->{data}->{stream}->[1]->[2], 's:sqn', 'data');
is($output->{data}->{stream}->[2]->[2], 's:alxv', 'data');
is($output->{textSigle}, 'AGD/DOC/00001', 'Correct text sigle');
is($output->{docSigle}, 'AGD/DOC', 'Correct document sigle');
is($output->{corpusSigle}, 'AGD', 'Correct corpus sigle');

is($output->{title}, 'FOLK_E_00321_SE_01_T_01_DF_01', 'Title');

## DeReKo
$tokens->add('DeReKo', 'Structure');

$output = decode_json( $tokens->to_json );

is($output->{data}->{foundries},
   'dereko dereko/structure',
   'Foundries');
is($output->{data}->{layerInfos}, 'dereko/s=spans', 'layerInfos');

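# The first token should carry the opening dereko/s:text span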
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:dereko/s:text!);

## DGD
ok($tokens->add('DGD', 'Morpho'), 'Add Morpho');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'dereko dereko/structure dgd dgd/morpho',
   'Foundries');
is($output->{data}->{layerInfos}, 'dereko/s=spans dgd/l=tokens dgd/p=tokens dgd/para=tokens',
   'layerInfos');

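# The third token should carry lemma (dgd/l), part-of-speech (dgd/p),
# and surface annotations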
my $third_token = join('||', @{$output->{data}->{stream}->[2]});
like($third_token, qr!dgd/l:alui!);
like($third_token, qr!dgd/p:VMGWY!);
like($third_token, qr!i:alxv!);
like($third_token, qr!s:alxv!);

## DGD base sentences
ok($tokens->add('DGD', 'Structure'), 'Add sentences');
$output = decode_json( $tokens->to_json );

# Offsets are set suboptimally, but good enough

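# Sentence span payloads (<>:base/s:s) encode the character offsets
# and the end token position of each sentence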
$first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr!<>:base/s:s\$<b>64<i>0<i>16<i>2<b>1!);

my $token = join('||', @{$output->{data}->{stream}->[2]});
like($token, qr!<>:base/s:s\$<b>64<i>16<i>23<i>4<b>1!);
$token = join('||', @{$output->{data}->{stream}->[3]});
unlike($token, qr!<>:base/s:s!);

$token = join('||', @{$output->{data}->{stream}->[4]});
like($token, qr!<>:base/s:s\$<b>64<i>23<i>27<i>5<b>1!);

$token = join('||', @{$output->{data}->{stream}->[5]});
like($token, qr!dgd/para:pause!);


# New revision
$path = catdir(dirname(__FILE__), '..', 'corpus', 'FOLK-scrambled', '00068-SE-01', 'T-05');
ok($doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load Korap::Document');
ok($doc->parse, 'Parse document');

is($doc->text_sigle, 'FOLK/00068-SE-01/T-05', 'Correct text sigle');
is($doc->doc_sigle, 'FOLK/00068-SE-01', 'Correct document sigle');
is($doc->corpus_sigle, 'FOLK', 'Correct corpus sigle');

$meta = $doc->meta;
is($meta->{T_title}, 'FOLK_E_00068_SE_01_T_05_DF_01', 'Title');

is($meta->{A_externalLink}, 'data:application/x.korap-link;title=DGD,https://dgd.ids-mannheim.de/DGD2Web/ExternalAccessServlet?command=displayData&id=FOLK_E_00068_SE_01_T_05', 'External link');

# Tokenization
use_ok('KorAP::XML::Tokenizer');

($token_base_foundry, $token_base_layer) = (qw/DGD Annot/);

# Get tokenization
$tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens',
  non_verbal_tokens => 1
);

ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

## DeReKo
# $tokens->add('DeReKo', 'Structure');

## DGD
ok($tokens->add('DGD', 'Morpho'), 'Add Morpho');

$output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 11, 30),
   'ogeuy Nva wvho zhl usblyuug Kt',
   'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'dgd#annot', 'tokenSource');

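# Non-verbal events such as pauses and vocal noises are indexed as dgd/para terms;
# their '@:'-prefixed attribute terms are linked to them via the <s> payload identifier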
is($output->{data}->{stream}->[0]->[1],
   '<>:base/s:t$<b>64<i>0<i>39384<i>7190<b>0',
   'data'
);

is($output->{data}->{stream}->[0]->[2],
   '@:dgd/para:type:micro$<b>16<s>1',
   'data'
);

is($output->{data}->{stream}->[0]->[3],
   '@:dgd/para:rend:(.)$<b>16<s>1',
   'data'
);

is($output->{data}->{stream}->[0]->[5],
   'dgd/para:pause$<b>128<s>1',
   'data'
);

is($output->{data}->{stream}->[1]->[0],
   '@:dgd/para:desc:short breathe in$<b>16<s>1',
   'data'
);

is($output->{data}->{stream}->[1]->[1],
   "\@:dgd/para:rend:\x{b0}h\$<b>16<s>1",
   'data'
);

is($output->{data}->{stream}->[1]->[3],
   'dgd/para:vocal$<b>128<s>1',
   'data'
);

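# A verbal token further in the stream carries lemma, part-of-speech,
# transcription, and type annotations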
is($output->{data}->{stream}->[97]->[1],
   'dgd/l:ui',
   'data'
);

is($output->{data}->{stream}->[97]->[2],
   'dgd/p:AUUK',
   'data'
);

is($output->{data}->{stream}->[97]->[3],
   'dgd/trans:rh',
   'data'
);

is($output->{data}->{stream}->[97]->[4],
   'dgd/type:assimilated',
   'data'
);


done_testing;
__END__