use strict;
use warnings;
#use open qw(:std :utf8); # see perlunifaq: What is the difference between ":encoding" and ":utf8"?
use open qw(:std :encoding(UTF-8)); # assume UTF-8 encoding (see utf8 in Test::More)
use Test::More;
use File::Basename 'dirname';
use File::Spec::Functions qw/catfile/;

use FindBin;
BEGIN {
  unshift @INC, "$FindBin::Bin/../lib";
};

require_ok('KorAP::XML::TEI::Tokenizer::Aggressive');
require_ok('KorAP::XML::TEI::Tokenizer::Conservative');

# Test aggressive
my $aggr = KorAP::XML::TEI::Tokenizer::Aggressive->new;
$aggr->tokenize("Der alte Mann");
is_deeply($aggr, [0,3,4,8,9,13]);

$aggr->reset->tokenize("Der alte bzw. der grau-melierte Mann");
is_deeply($aggr, [0,3,4,8,9,12,12,13,14,17,18,22,22,23,23,31,32,36]);
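
# Both tokenizers fill the object (a blessed array reference) with a flat
# list of character offsets in (start, end) pairs, where the end offset is
# exclusive. The following helper is not part of the tokenizer API, only a
# minimal sketch (assuming exactly this pairwise layout, as the fixtures
# above suggest) to show how the offsets map back to token strings:
sub tokens_from_offsets {
  my ($text, $offsets) = @_;
  my @tokens;
  for (my $i = 0; $i < @$offsets; $i += 2) {
    # extract the substring from the start offset to the exclusive end offset
    push @tokens, substr($text, $offsets->[$i], $offsets->[$i + 1] - $offsets->[$i]);
  };
  return @tokens;
};

is_deeply(
  [tokens_from_offsets("Der alte bzw. der grau-melierte Mann", $aggr)],
  ['Der', 'alte', 'bzw', '.', 'der', 'grau', '-', 'melierte', 'Mann']
);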

# Test conservative
my $cons = KorAP::XML::TEI::Tokenizer::Conservative->new;
$cons->tokenize("Der alte Mann");
is_deeply($cons, [0,3,4,8,9,13]);

$cons->reset->tokenize("Der alte bzw. der grau-melierte Mann");
is_deeply($cons, [0,3,4,8,9,12,12,13,14,17,18,31,32,36]);
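
# In contrast to the aggressive tokenizer, the conservative one keeps the
# hyphenated compound together: the pair (18,31) covers 'grau-melierte'
# instead of the three tokens 'grau', '-', 'melierte'. Illustrated with
# the helper sketch from above:
is_deeply(
  [tokens_from_offsets("Der alte bzw. der grau-melierte Mann", $cons)],
  ['Der', 'alte', 'bzw', '.', 'der', 'grau-melierte', 'Mann']
);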

$cons->reset->tokenize("  Der alte bzw. der grau-melierte Mann");
is_deeply($cons, [2,5,6,10,11,14,14,15,16,19,20,33,34,38]);

$cons->reset->tokenize(". Der");
is_deeply($cons, [0,1,2,5]);

$cons->reset->tokenize(" . Der");
is_deeply($cons, [1,2,3,6]);

$cons->reset->tokenize("   . Der");
is_deeply($cons, [3,4,5,8]);

$cons->reset->tokenize("... Der");
is_deeply($cons, [0,1,1,2,2,3,4,7]);
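
# Worked example: the pairs (0,1), (1,2), (2,3) and (4,7) correspond to
# three single dots and 'Der':
is_deeply([tokens_from_offsets("... Der", $cons)], ['.', '.', '.', 'Der']);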

# Fixed: a leading '.' is now tokenized
$cons->reset->tokenize(".Der");
is_deeply($cons, [0,1,1,4]);

$cons->reset->tokenize(".Der.... ");
is_deeply($cons, [0,1,1,4,4,5,5,6,6,7,7,8]);
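
# Trailing dot runs are split into single dots; with the helper sketch
# from above:
is_deeply(
  [tokens_from_offsets(".Der.... ", $cons)],
  ['.', 'Der', '.', '.', '.', '.']
);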

$cons->reset->tokenize("..Der.... ");
is_deeply($cons, [0,1,1,2,2,5,5,6,6,7,7,8,8,9]);

$cons->reset->tokenize(". Der.... ");
is_deeply($cons, [0,1,2,5,5,6,6,7,7,8,8,9]);

$cons->reset->tokenize(". .Der.... ");
is_deeply($cons, [0,1,2,3,3,6,6,7,7,8,8,9,9,10]);

$cons->reset->tokenize("Der\talte\nMann");
is_deeply($cons, [0,3,4,8,9,13]);
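
# Tab and newline are treated as token separators, just like a space:
is_deeply([tokens_from_offsets("Der\talte\nMann", $cons)], ['Der', 'alte', 'Mann']);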


##### TODO: the big wikipedia.txt leads to very slow processing => use a smaller test file as a temporary solution (see below)
## Test data
#my $dataf = catfile(dirname(__FILE__), 'data', 'wikipedia.txt');
#my $data = '';
#
#ok(open(my $fh, '<' . $dataf), 'Open file wikipedia.txt');
#while (!eof($fh)) {
#  $data .= <$fh>
#};
#
### DEBUG
##my @layers = PerlIO::get_layers($fh); # see 'man PerlIO': Querying the layers of filehandles
##foreach my $l (@layers) { print STDERR "DEBUG (filehandle layer): $l\n" };
#
#ok(close($fh), 'Close file wikipedia.txt');
#
#is(134996, length($data)); # mind that each UTF-8 character counts only once
#
## TODO: With the necessary open pragma (see above), this is extremely slow ... Where's the bottleneck?
## There is no performance issue when piping 'wikipedia.txt' into a perl one-liner (also not when using the while-loop from Aggressive.pm):
##   cat t/data/wikipedia.txt | perl -ne 'use open qw(:std :utf8); chomp; for($i=0;$i<length;$i++){$c=substr $_,$i,1; print ">$c<\n" if $c=~/\p{Punct}/}' >/dev/null
##   cat t/data/wikipedia.txt | perl -ne 'use open qw(:std :utf8); chomp; while($_=~/([^\p{Punct} \x{9}\n]+)(?:(\p{Punct})|(?:[ \x{9}\n])?)|(\p{Punct})/gx){ print "$1\n" if $1}' >/dev/null
## Note: check the different output with/without the additional UTF-8 layer:
##   echo "„Wikipedia-Artikel brauchen Fotos“" | perl -ne 'chomp; for($i=0;$i<length;$i++){$c=substr $_,$i,1; print ">$c<\n" if $c=~/\p{Punct}/}'
##   echo "„Wikipedia-Artikel brauchen Fotos“" | perl -ne 'use open qw(:std :utf8); chomp; for($i=0;$i<length;$i++){$c=substr $_,$i,1; print ">$c<\n" if $c=~/\p{Punct}/}'
#
#diag("DEBUG (aggr): Tokenizing Wikipedia text (134K). Because of an additional PerlIO layer (utf8) on the filehandle, this takes significantly more time. Please wait ...\n");
#$aggr->reset->tokenize($data);
#is_deeply([@{$aggr}[0..25]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,49,49,50,50,57,58,66,67,72,72,73]);
#is(47112, scalar(@$aggr));
#
#diag("DEBUG (cons): Tokenizing Wikipedia text (134K). Because of an additional PerlIO layer (utf8) on the filehandle, this takes significantly more time. Please wait ...\n");
#$cons->reset->tokenize($data);
#is_deeply([@{$cons}[0..21]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,57,58,66,67,72,72,73]);
#is(42412, scalar(@$cons));
#
## Check the tokenization of 'Community-Ämter aufgestiegen':
##   from @{$cons}[19518] (=66070) to @{$cons}[19519] (=66085) => 'Community-Ämter'
##   from @{$cons}[19520] (=66086) to @{$cons}[19521] (=66098) => 'aufgestiegen'
#my @vals_got = (66070, 66085, 66086, 66098);
#my @vals_exp; push @vals_exp, @{$cons}[$_] for (19518, 19519, 19520, 19521);
#is_deeply([@vals_exp], [@vals_got]);
##
##### TODO: use a smaller test file as a temporary workaround (until the performance problem is solved)
$cons->reset->tokenize("Community-\xc4mter aufgestiegen");
is_deeply($cons, [0,15,16,28]);

my $dataf = catfile(dirname(__FILE__), 'data', 'wikipedia_small.txt');
my $data = '';
ok(open(my $fh, '<' . $dataf), 'Open file wikipedia_small.txt');
while (!eof($fh)) {
  $data .= <$fh>
};
ok(close($fh), 'Close file wikipedia_small.txt');
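
# The slowness described in the TODO above comes from reading through the
# :encoding(UTF-8) PerlIO layer. A possible workaround (only a sketch, not
# what the tokenizer modules prescribe) is to slurp the raw octets and
# decode them once with Encode:
use Encode qw(decode);
ok(open(my $fh_raw, '<:raw', $dataf), 'Open file wikipedia_small.txt (raw)');
my $data_raw = do { local $/; <$fh_raw> }; # slurp all octets at once
ok(close($fh_raw), 'Close file wikipedia_small.txt (raw)');
is(decode('UTF-8', $data_raw), $data, 'One-off decode matches layered read');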

$aggr->reset->tokenize($data);
is_deeply([@{$aggr}[0..25]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,49,49,50,50,57,58,66,67,72,72,73]);
is(366, scalar(@$aggr));

$cons->reset->tokenize($data);
is_deeply([@{$cons}[0..21]], [1,7,8,12,14,18,19,22,23,27,28,38,39,40,40,57,58,66,67,72,72,73]);
is(302, scalar(@$cons));
#####


done_testing;