Akron | eac374d | 2020-07-07 09:00:44 +0200 | [diff] [blame] | 1 | use strict; |
| 2 | use warnings; |
Peter Harders | 1d65f94 | 2020-07-22 23:31:00 +0200 | [diff] [blame^] | 3 | #use open qw(:std :utf8); # see perlunifaq: What is the difference between ":encoding" and ":utf8"? |
| 4 | use open qw(:std :encoding(UTF-8)); # assume utf-8 encoding (see utf8 in Test::More) |
Akron | eac374d | 2020-07-07 09:00:44 +0200 | [diff] [blame] | 5 | use Test::More; |
| 6 | use File::Basename 'dirname'; |
| 7 | use File::Spec::Functions qw/catfile/; |
Akron | eac374d | 2020-07-07 09:00:44 +0200 | [diff] [blame] | 8 | |
| 9 | use FindBin; |
| 10 | BEGIN { |
| 11 | unshift @INC, "$FindBin::Bin/../lib"; |
| 12 | }; |
| 13 | |
# Load both tokenizer implementations under test.
require_ok("KorAP::XML::TEI::Tokenizer::$_") for qw(Aggressive Conservative);
Akron | eac374d | 2020-07-07 09:00:44 +0200 | [diff] [blame] | 16 | |
# Test the aggressive tokenizer. The object is an array reference of
# flat (start, end) character-offset pairs; aggressive tokenization
# splits at every punctuation character (so "grau-melierte" yields
# "grau", "-", "melierte").
my $aggr = KorAP::XML::TEI::Tokenizer::Aggressive->new;
$aggr->tokenize("Der alte Mann");
is_deeply($aggr, [0,3,4,8,9,13], 'Aggressive: simple whitespace tokenization');

$aggr->reset->tokenize("Der alte bzw. der grau-melierte Mann");
is_deeply($aggr, [0,3,4,8,9,12,12,13,14,17,18,22,22,23,23,31,32,36],
  'Aggressive: dot and hyphen are split off as separate tokens');
Akron | eac374d | 2020-07-07 09:00:44 +0200 | [diff] [blame] | 24 | |
# Test the conservative tokenizer. Unlike the aggressive variant it
# keeps word-internal punctuation, so "grau-melierte" stays one token.
my $cons = KorAP::XML::TEI::Tokenizer::Conservative->new;
$cons->tokenize("Der alte Mann");
is_deeply($cons, [0,3,4,8,9,13], 'Conservative: simple whitespace tokenization');

$cons->reset->tokenize("Der alte bzw. der grau-melierte Mann");
is_deeply($cons, [0,3,4,8,9,12,12,13,14,17,18,31,32,36],
  'Conservative: hyphenated word stays a single token');

$cons->reset->tokenize("  Der alte bzw. der grau-melierte Mann");
is_deeply($cons, [2,5,6,10,11,14,14,15,16,19,20,33,34,38],
  'Conservative: leading whitespace shifts all offsets');

$cons->reset->tokenize(". Der");
is_deeply($cons, [0,1,2,5], 'Conservative: leading dot is its own token');

$cons->reset->tokenize(" . Der");
is_deeply($cons, [1,2,3,6], 'Conservative: dot after one leading space');

$cons->reset->tokenize("   . Der");
is_deeply($cons, [3,4,5,8], 'Conservative: dot after three leading spaces');

$cons->reset->tokenize("... Der");
is_deeply($cons, [0,1,1,2,2,3,4,7], 'Conservative: ellipsis split into single dots');

# TODO:
# bug: the leading '.' is not tokenized
$cons->reset->tokenize(".Der");
is_deeply($cons, [1,4], 'Conservative: dot-prefixed word (known bug: dot dropped)');

$cons->reset->tokenize(".Der.... ");
is_deeply($cons, [1,4,4,5,5,6,6,7,7,8],
  'Conservative: dot prefix dropped, trailing dots tokenized');

$cons->reset->tokenize("..Der.... ");
is_deeply($cons, [0,1,1,2,2,5,5,6,6,7,7,8,8,9],
  'Conservative: double dot prefix is tokenized');
| 58 | |
# Test data: slurp the Wikipedia sample. The open pragma above applies
# the :encoding(UTF-8) layer, so length() counts characters, not bytes.
my $dataf = catfile(dirname(__FILE__), 'data', 'wikipedia.txt');
my $data = '';

# Three-arg open: the original concatenated the mode into the filename
# expression ('<' . $dataf), i.e. a 2-arg open, which is unsafe if the
# path ever contains mode characters.
ok(open(my $fh, '<', $dataf), 'Open file');

# Slurp in one read via a locally undefined input record separator,
# replacing the line-by-line eof()/append loop.
$data = do { local $/; <$fh> } // '';

## DEBUG
#my @layers = PerlIO::get_layers($fh); # see 'man PerlIO': Querying the layers of filehandles
#foreach my $l (@layers) { print STDERR "DEBUG (filehandle layer): $l\n" };

ok(close($fh), 'Close file');

# is($got, $expected, ...): the original had the arguments reversed,
# which garbles the got/expected diagnostics on failure.
is(length($data), 134996, 'Data has expected character length');
# mind that each UTF-8 character counts only once
| 75 | |
| 76 | ## note |
| 77 | # check different output with/without additional UTF-8 layer |
| 78 | # echo "„Wikipedia-Artikel brauchen Fotos“" | perl -ne 'chomp; for($i=0;$i<length;$i++){$c=substr $_,$i,1; print ">$c<\n" if $c=~/\p{Punct}/}' |
| 79 | # echo "„Wikipedia-Artikel brauchen Fotos“" | perl -ne 'use open qw(:std :utf8); chomp; for($i=0;$i<length;$i++){$c=substr $_,$i,1; print ">$c<\n" if $c=~/\p{Punct}/}' |
| 80 | |
# TODO: With the necessary open-pragma (see above), this is extremely slow ... Where's the bottleneck?
| 82 | # No performance-issue, when piping 'wikipedia.txt' into a perl one-liner (also not, when using while-loop from Aggressive.pm): |
| 83 | # cat t/data/wikipedia.txt | perl -ne 'use open qw(:std :utf8); chomp; for($i=0;$i<length;$i++){$c=substr $_,$i,1; print ">$c<\n" if $c=~/\p{Punct}/}' >/dev/null |
| 84 | # cat t/data/wikipedia.txt | perl -ne 'use open qw(:std :utf8); chomp; while($_=~/([^\p{Punct} \x{9}\n]+)(?:(\p{Punct})|(?:[ \x{9}\n])?)|(\p{Punct})/gx){ print "$1\n" if $1}' >/dev/null |
# Tokenize the full Wikipedia sample with both tokenizers and spot-check
# the first offsets plus the total offset count.
diag("DEBUG: Tokenizing Wikipedia Text (134K). Because of an additional PerlIO layer (utf8) on the filehandle, this takes significantly more time. Please wait ...\n");
$aggr->reset->tokenize($data);
is_deeply([@{$aggr}[0..25]],
  [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,49,49,50,50,57,58,66,67,72,72,73],
  'Aggressive: first offsets of Wikipedia data');
# is($got, $expected, ...): argument order fixed for correct diagnostics.
is(scalar(@$aggr), 47112, 'Aggressive: total number of offsets');

diag("DEBUG: Tokenizing Wikipedia Text (134K). Because of an additional PerlIO layer (utf8) on the filehandle, this takes significantly more time. Please wait ...\n");
$cons->reset->tokenize($data);
is_deeply([@{$cons}[0..21]],
  [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,57,58,66,67,72,72,73],
  'Conservative: first offsets of Wikipedia data');
is(scalar(@$cons), 43218, 'Conservative: total number of offsets');

done_testing;