#!/usr/bin/env perl
use strict;
use warnings;
use utf8;
use Test::More;
use Scalar::Util qw/weaken/;
use Data::Dumper;

use_ok('KorAP::Document');

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

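# Test document bundled with the test suite under corpus/doc/0001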
my $path = catdir(dirname(__FILE__), 'corpus', 'doc', '0001');

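# Load and parse the document; the trailing slash is expected by the path check below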
ok(my $doc = KorAP::Document->new(
  path => $path . '/'
), 'Load KorAP::Document');

like($doc->path, qr!$path/$!, 'Path');
ok($doc->parse, 'Parse document');

ok($doc->primary->data, 'Primary data in existence');
is($doc->primary->data_length, 129, 'Data length');

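# Set up a tokenizer for the document, based on the OpenNLP 'Tokens' layer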
use_ok('KorAP::Tokenizer');

ok(my $tokens = KorAP::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => 'OpenNLP',
  layer => 'Tokens',
  name => 'tokens'
), 'New Tokenizer');

ok($tokens->parse, 'Parse');

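# Add paragraph annotations from the 'Base' foundry to the token stream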
ok($tokens->add('Base', 'Paragraphs'), 'Add Structure');

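# Serialize the tokenized document and inspect the resulting annotation stream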
my $data = $tokens->to_data->{data};

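# The first stream position carries the document-level count terms ('-:...'),
# the paragraph span ('<>:...' with its offset payload) and the character
# offsets of the first token ('_0...'), as checked below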
like($data->{foundries}, qr!base/paragraphs!, 'data');
is($data->{stream}->[0]->[0], '-:base/paragraphs$<i>1', 'Number of paragraphs');
is($data->{stream}->[0]->[1], '-:tokens$<i>18', 'Number of tokens');
is($data->{stream}->[0]->[2], '<>:base/s:p$<b>64<i>0<i>129<i>17<b>1', 'Paragraph');
is($data->{stream}->[0]->[3], '_0$<i>0<i>3', 'Position');

done_testing;

__END__