use strict;
use warnings;
use Test::More;
use File::Basename 'dirname';
use File::Spec::Functions qw/catfile/;
use IO::Uncompress::Unzip;
use utf8;
use open qw(:std :utf8); # assume utf-8 encoding

use FindBin;
BEGIN {
  unshift @INC, "$FindBin::Bin/../lib";
};

use_ok('Test::KorAP::XML::TEI', 'korap_tempfile');
require_ok('KorAP::XML::TEI::Tokenizer::Aggressive');
require_ok('KorAP::XML::TEI::Tokenizer::Conservative');
require_ok('KorAP::XML::TEI::Zipper');

# Test aggressive
my $aggr = KorAP::XML::TEI::Tokenizer::Aggressive->new;
ok($aggr->empty, 'Empty');
$aggr->tokenize("Der alte Mann");
ok(!$aggr->empty, 'Not empty');
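# The tokenizer result is a flat list of character offsets: each token
# is represented by a start and an end offset, here "Der" (0-3),
# "alte" (4-8) and "Mann" (9-13).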
is_deeply($aggr, [0,3,4,8,9,13]);

$aggr->reset;
ok($aggr->empty, 'Empty');

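# The aggressive tokenizer splits at punctuation and hyphens:
# "bzw." becomes "bzw" + "." and "grau-melierte" becomes "grau" + "-" + "melierte".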
$aggr->tokenize("Der alte bzw. der grau-melierte Mann");
is_deeply($aggr, [0,3,4,8,9,12,12,13,14,17,18,22,22,23,23,31,32,36]);

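# reset() and tokenize() return the tokenizer itself, so calls can be
# chained; to_string() serializes the spans (only the id attribute of
# the first token is checked here).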
like(
  $aggr->reset->tokenize("Der")->to_string('a'),
  qr!id="t_0"!,
  'Chainable'
);

# Test conservative
my $cons = KorAP::XML::TEI::Tokenizer::Conservative->new;
$cons->tokenize("Der alte Mann");
is_deeply($cons, [0,3,4,8,9,13]);

$cons->reset->tokenize("Der ältere Mann");
is_deeply($cons, [0,3,4,10,11,15]);

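# Unlike the aggressive tokenizer, the conservative one keeps
# "grau-melierte" as a single token but still splits the "." after "bzw".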
$cons->reset->tokenize("Der alte bzw. der grau-melierte Mann");
is_deeply($cons, [0,3,4,8,9,12,12,13,14,17,18,31,32,36]);

$cons->reset->tokenize("  Der alte bzw. der grau-melierte Mann");
is_deeply($cons, [2,5,6,10,11,14,14,15,16,19,20,33,34,38]);

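# Leading dots are emitted as single-character tokens and leading
# whitespace only shifts the offsets.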
$cons->reset->tokenize(". Der");
is_deeply($cons, [0,1,2,5]);

$cons->reset->tokenize(" . Der");
is_deeply($cons, [1,2,3,6]);

$cons->reset->tokenize("   . Der");
is_deeply($cons, [3,4,5,8]);

$cons->reset->tokenize("... Der");
is_deeply($cons, [0,1,1,2,2,3,4,7]);

$cons->reset->tokenize(".Der");
is_deeply($cons, [0,1,1,4]);

$cons->reset->tokenize(".Der.... ");
is_deeply($cons, [0,1,1,4,4,5,5,6,6,7,7,8]);

$cons->reset->tokenize("..Der.... ");
is_deeply($cons, [0,1,1,2,2,5,5,6,6,7,7,8,8,9]);

$cons->reset->tokenize(". Der.... ");
is_deeply($cons, [0,1,2,5,5,6,6,7,7,8,8,9]);

$cons->reset->tokenize(". .Der.... ");
is_deeply($cons, [0,1,2,3,3,6,6,7,7,8,8,9,9,10]);

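# Tabs and newlines count as token boundaries just like spaces.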
$cons->reset->tokenize("Der\talte\nMann");
is_deeply($cons, [0,3,4,8,9,13]);

## Test data
my $dataf = catfile(dirname(__FILE__), 'data', 'wikipedia.txt');
my $data = '';

ok(open(my $fh, '<', $dataf), 'Open file wikipedia.txt');

while (!eof($fh)) {
  $data .= <$fh>
};

ok(close($fh), 'Close file wikipedia.txt');

is(length($data), 134996);

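# Tokenize the complete sample and check the first offsets as well as the
# total number of boundary values (two entries per token).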
$aggr->reset->tokenize($data);
is_deeply([@{$aggr}[0..25]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,49,49,50,50,57,58,66,67,72,72,73]);
is(scalar(@$aggr), 47112);

$cons->reset->tokenize($data);
is_deeply([@{$cons}[0..21]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,57,58,66,67,72,72,73]);
is(scalar(@$cons), 42412);

## check tokenization of 'Community-Ämter aufgestiegen'
## from @{$cons}[19518] (=66070) to @{$cons}[19519] (=66085) => 'Community-Ämter'
## from @{$cons}[19520] (=66086) to @{$cons}[19521] (=66098) => 'aufgestiegen'
my @vals_exp = (66070, 66085, 66086, 66098);
my @vals_got = @{$cons}[19518 .. 19521];
is_deeply(\@vals_got, \@vals_exp);

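# "\xc4" is a literal "Ä"; the hyphenated compound is again kept as a
# single token by the conservative tokenizer.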
$cons->reset->tokenize("Community-\xc4mter aufgestiegen");
is_deeply($cons, [0,15,16,28]);

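# Repeat the checks with a smaller Wikipedia sample.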
$dataf = catfile(dirname(__FILE__), 'data', 'wikipedia_small.txt');
$data = '';
ok(open($fh, '<', $dataf), 'Open file wikipedia_small.txt');
while (!eof($fh)) {
  $data .= <$fh>
};
ok(close($fh), 'Close file wikipedia_small.txt');

$aggr->reset->tokenize($data);
is_deeply([@{$aggr}[0..25]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,49,49,50,50,57,58,66,67,72,72,73]);
is(scalar(@$aggr), 366);

$cons->reset->tokenize($data);
is_deeply([@{$cons}[0..21]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,57,58,66,67,72,72,73]);
is(scalar(@$cons), 302);


subtest 'Test Zipper' => sub {
  # Test Zipper
  my ($fh, $outzip) = korap_tempfile('tokenize_zipper');
  my $zip = KorAP::XML::TEI::Zipper->new($outzip);
  $fh->close;

  my $aggr = KorAP::XML::TEI::Tokenizer::Aggressive->new;
  $aggr->tokenize("Der alte Mann");
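  # Write the token spans as 'tokens.xml' into the zip archive and make
  # sure the archive can be opened and read back afterwards.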
  ok($aggr->to_zip(
    $zip->new_stream('tokens.xml'),
    'fun'
  ), 'Written successfully');

  $zip->close;

  ok(-e $outzip, 'Zip exists');
  my $unzip = IO::Uncompress::Unzip->new($outzip, Name => 'tokens.xml');
  ok(!$unzip->eof, 'Unzip successful');
};


done_testing;