#!/usr/bin/env perl
use strict;
use warnings;
use FindBin;
BEGIN { unshift @INC, "$FindBin::Bin/../lib" };
use File::Spec::Functions qw/catfile catdir/;
use Getopt::Long qw/GetOptions :config no_auto_abbrev/;
use Benchmark qw/:hireswallclock/;
use IO::Compress::Gzip qw/$GzipError/;
use IO::File;
use File::Temp;
use Log::Log4perl;
use Pod::Usage;
use Directory::Iterator;
use KorAP::XML::Krill;
use KorAP::XML::Archive;
use KorAP::XML::Tokenizer;
use Parallel::ForkManager;
# TODO: use Parallel::Loops

# CHANGES:
# ----------------------------------------------------------
# 2013/11/25
# - Initial release
#
# 2014/10/29
# - Merges foundry data to create indexer friendly documents
#
# 2016/02/04
# - renamed to korapxml2krill
# - added Schreibgebrauch support
#
# 2016/02/12
# - fixed foundry skipping
# - Support overwrite in archive processing
#
# 2016/02/14
# - Added version information
# - Added support for archive files
#
# 2016/02/15
# - Fixed temporary directory bug
# - Improved skipping before unzipping
# - Added EXPERIMENTAL concurrency support
#
# 2016/02/23
# - Merge korapxml2krill and korapxml2krill_dir
#
# 2016/02/27
# - Added extract function
# ----------------------------------------------------------

our $LAST_CHANGE = '2016/03/02';
our $LOCAL = $FindBin::Bin;
our $VERSION_MSG = <<"VERSION";
Version $KorAP::XML::Krill::VERSION - diewald\@ids-mannheim.de - $LAST_CHANGE
VERSION


# Parse command
my $cmd;
our @ARGV;
if ($ARGV[0] && index($ARGV[0], '-') != 0) {
  $cmd = shift @ARGV;
};

my (@skip, @sigle);

# Parse options from the command line
GetOptions(
  'input|i=s'   => \(my $input),
  'output|o=s'  => \(my $output),
  'overwrite|w' => \(my $overwrite),
  'human|m'     => \(my $text),
  'token|t=s'   => \(my $token_base),
  'gzip|z'      => \(my $gzip),
  'skip|s=s'    => \@skip,
  'sigle|sg=s'  => \@sigle,
  'log|l=s'     => \(my $log_level = 'ERROR'),
  'allow|a=s'   => \(my @allow),
  'primary|p!'  => \(my $primary),
  'pretty|y'    => \(my $pretty),
  'jobs|j=i'    => \(my $jobs = 0),
  'help|h'      => sub {
    pod2usage(
      -sections => 'NAME|SYNOPSIS|ARGUMENTS|OPTIONS',
      -verbose  => 99,
      -msg      => $VERSION_MSG,
    );
  },
  'version|v'   => sub {
    pod2usage(
      -verbose => 0,
      -msg     => $VERSION_MSG
    )
  }
);

my %ERROR_HASH = (
  -sections => 'NAME|SYNOPSIS|ARGUMENTS|OPTIONS',
  -verbose  => 99,
  -msg      => $VERSION_MSG,
  -exit     => 1
);

# Input has to be defined
pod2usage(%ERROR_HASH) unless $input;


# Initialize log4perl object
Log::Log4perl->init({
  'log4perl.rootLogger' => uc($log_level) . ', STDERR',
  'log4perl.appender.STDERR' => 'Log::Log4perl::Appender::ScreenColoredLevels',
  'log4perl.appender.STDERR.layout' => 'PatternLayout',
  'log4perl.appender.STDERR.layout.ConversionPattern' => '[%r] %F %L %c - %m%n'
});

my $log = Log::Log4perl->get_logger('main');


# Get file name based on path information
sub get_file_name ($) {
  my $file = shift;

  # Strip the input base path and flatten the remaining path
  $file =~ s/^\/?$input//;
  $file =~ tr/\//-/;
  $file =~ s{^-+}{};
  return $file;
};


# Write file
sub write_file {
  my $anno = shift;
  my $file = get_file_name $anno;

  # TODO: This should be done directly with a data structure! KorAP::XML::Wrap

  my $call = 'perl ' . $LOCAL . '/korapxml2krill -i ' .
    $anno . ' -o ' . $output . '/' . $file . '.json';
  $call .= '.gz -z' if $gzip;
  $call .= ' -m'    if $text;
  $call .= ' -w'    if $overwrite;
  $call .= ' -t ' . $token_base if $token_base;
  $call .= ' -l ' . $log_level if $log_level;

  # Pass --no-primary on only if primary output was explicitly disabled
  $call .= ' --no-primary' if defined $primary && !$primary;

  # --pretty is a flag and takes no argument
  $call .= ' -y' if $pretty;
  $call .= ' -a ' . $_ foreach @allow;
  $call .= ' -s ' . $_ foreach @skip;
  system($call);
  return "$file";
};


# Convert sigle to path construct
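# (e.g. a sigle like 'ABC_DEF.00001' becomes the path 'ABC/DEF/00001')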
s!^\s*([^_]+?)_([^\.]+?)\.(.+?)\s*$!$1/$2/$3! foreach @sigle;

# Process a single file
unless ($cmd) {

  # Can't print gzip to STDOUT
  pod2usage(%ERROR_HASH) if $gzip && !$output;

  my %skip;
  $skip{lc($_)} = 1 foreach @skip;


  # Ignore processing
  if (!$overwrite && $output && -e $output) {
    $log->trace($output . ' already exists');
    exit(0);
  };

  BEGIN {
    $main::TIME = Benchmark->new;
    $main::LAST_STOP = Benchmark->new;
  };

  sub stop_time {
    my $new = Benchmark->new;
    $log->trace(
      'The code took: ' .
      timestr(timediff($new, $main::LAST_STOP)) .
      ' (overall: ' . timestr(timediff($new, $main::TIME)) . ')'
    );
    $main::LAST_STOP = $new;
  };

  # Create and parse new document
  $input =~ s{([^/])$}{$1/};
  my $doc = KorAP::XML::Krill->new( path => $input );

  unless ($doc->parse) {
    $log->warn($input . " can't be processed - no document data");
    exit(0);
  };

  my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);
  if ($token_base) {
    ($token_base_foundry, $token_base_layer) = split /#/, $token_base;
  };

  # Get tokenization
  my $tokens = KorAP::XML::Tokenizer->new(
    path    => $doc->path,
    doc     => $doc,
    foundry => $token_base_foundry,
    layer   => $token_base_layer,
    name    => 'tokens'
  );

  # Unable to process base tokenization
  unless ($tokens->parse) {
    $log->error($input . " can't be processed - no base tokenization");
    exit(0);
  };

  my @layers;
  push(@layers, ['Base', 'Sentences']);
  push(@layers, ['Base', 'Paragraphs']);

  # Connexor
  push(@layers, ['Connexor', 'Morpho']);
  push(@layers, ['Connexor', 'Syntax']);
  push(@layers, ['Connexor', 'Phrase']);
  push(@layers, ['Connexor', 'Sentences']);

  # CoreNLP
  push(@layers, ['CoreNLP', 'NamedEntities']);
  push(@layers, ['CoreNLP', 'Sentences']);
  push(@layers, ['CoreNLP', 'Morpho']);
  push(@layers, ['CoreNLP', 'Constituency']);

  # DeReKo
  push(@layers, ['DeReKo', 'Structure']);

  # Glemm
  push(@layers, ['Glemm', 'Morpho']);

  # Malt
  # push(@layers, ['Malt', 'Dependency']);

  # Mate
  push(@layers, ['Mate', 'Morpho']);
  push(@layers, ['Mate', 'Dependency']);

  # OpenNLP
  push(@layers, ['OpenNLP', 'Morpho']);
  push(@layers, ['OpenNLP', 'Sentences']);

  # Schreibgebrauch
  push(@layers, ['Sgbr', 'Lemma']);
  push(@layers, ['Sgbr', 'Morpho']);

  # TreeTagger
  push(@layers, ['TreeTagger', 'Morpho']);
  push(@layers, ['TreeTagger', 'Sentences']);

  # XIP
  push(@layers, ['XIP', 'Morpho']);
  push(@layers, ['XIP', 'Constituency']);
  push(@layers, ['XIP', 'Sentences']);
  push(@layers, ['XIP', 'Dependency']);

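  # If '#ALL' is skipped, only the layers explicitly passed
  # via --allow are added to the token stream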
  if ($skip{'#all'}) {
    foreach (@allow) {
      $tokens->add(split('#', $_));
      stop_time;
    };
  }
  else {
    # Add to index file - respect skipping
    foreach my $info (@layers) {

      # Skip if Foundry or Foundry#Layer should be skipped
      unless ($skip{lc($info->[0])} || $skip{lc($info->[0]) . '#' . lc($info->[1])}) {
        $tokens->add(@$info);
        stop_time;
      };
    };
  };

  my $file;

  my $print_text = $text ? $tokens->to_string($primary) :
    ($pretty ? $tokens->to_pretty_json($primary) : $tokens->to_json($primary));

  if ($output) {

    if ($gzip) {
      $file = IO::Compress::Gzip->new($output, Minimal => 1);
    }
    else {
      $file = IO::File->new($output, "w");
    };

    $file->print($print_text);
    $file->close;
  }

  else {
    print $print_text . "\n";
  };

  stop_time;
}

# Extract XML files
elsif ($cmd eq 'extract') {

  pod2usage(%ERROR_HASH) unless $output;

  # TODO: Support sigles and full archives

  if ($output && (!-e $output || !-d $output)) {
    print "Directory '$output' does not exist.\n\n";
    exit(0);
  };

  if (-f($input) && (my $archive = KorAP::XML::Archive->new($input))) {

    unless ($archive->test_unzip) {
      print "Unzip is not installed or incompatible.\n\n";
      exit(1);
    };

    # Unlike in archive processing, $archive->test is skipped here

    # Iterate over all given sigles and extract
    foreach (@sigle) {
      print "$_ ";
      print '' . ($archive->extract('./' . $_, $output) ? '' : 'not ');
      print "extracted.\n";
    };

    print "\n";
    exit(1);
  };
}

# Process an archive
elsif ($cmd eq 'archive') {

  # TODO: Support sigles

  pod2usage(%ERROR_HASH) unless $output;

  if ($output && (!-e $output || !-d $output)) {
    print "Directory '$output' does not exist.\n\n";
    exit(0);
  };

  # Zero means: everything runs in the parent process
  my $pool = Parallel::ForkManager->new($jobs);

  my $count = 0; # Texts to process
  my $iter  = 1; # Current text in process

  # Report on fork message
  $pool->run_on_finish(
    sub {
      # Parallel::ForkManager passes ($pid, $exit_code, $ident,
      # $exit_signal, $core_dump, $data_reference)
      my ($pid, $code) = @_;
      my $data = pop;
      print 'Convert [' . ($jobs > 0 ? "$pid:" : '') .
        ($iter++) . "/$count]" .
        ($code ? " $code" : '') .
        " $$data\n";
    }
  );

  my $t;
  print "Reading data ...\n";

  # Input is a directory
  if (-d $input) {
    my $it = Directory::Iterator->new($input);
    my @dirs;
    my $dir;

    while (1) {
      if (!$it->is_directory && ($dir = $it->get) && $dir =~ s{/data\.xml$}{}) {
        push @dirs, $dir;
        $it->prune;
      };
      last unless $it->next;
    };

    print "Start processing ...\n";
    $t = Benchmark->new;
    $count = scalar @dirs;

  DIRECTORY_LOOP:
    for (my $i = 0; $i < $count; $i++) {

      unless ($overwrite) {
        my $filename = catfile(
          $output,
          get_file_name($dirs[$i]) . '.json' . ($gzip ? '.gz' : '')
        );

        if (-e $filename) {
          $iter++;
          print "Skip $filename\n";
          next;
        };
      };

      # Get the next fork
      my $pid = $pool->start and next DIRECTORY_LOOP;
      my $msg;

      $msg = write_file($dirs[$i]);
      $pool->finish(0, \$msg);
    };
  }

  # Input is a file
  elsif (-f($input) && (my $archive = KorAP::XML::Archive->new($input))) {
    unless ($archive->test_unzip) {
      print "Unzip is not installed or incompatible.\n\n";
      exit(1);
    };

    unless ($archive->test) {
      print "Zip archive not compatible.\n\n";
      exit(1);
    };

    print "Start processing ...\n";
    $t = Benchmark->new;
    my @dirs = $archive->list_texts;
    $count = scalar @dirs;

  ARCHIVE_LOOP:
    for (my $i = 0; $i < $count; $i++) {

      # Split path information
      my ($prefix, $corpus, $doc, $text) = $archive->split_path($dirs[$i]);

      unless ($overwrite) {
        my $filename = catfile(
          $output,
          get_file_name(catdir($doc, $text)) . '.json' . ($gzip ? '.gz' : '')
        );

        if (-e $filename) {
          $iter++;
          print "Skip $filename\n";
          next;
        };
      };

      # Get the next fork
      my $pid = $pool->start and next ARCHIVE_LOOP;

      # Create temporary directory
      my $temp = File::Temp->newdir;

      my $msg;

      # Extract from archive
      if ($archive->extract($dirs[$i], $temp)) {

        # Create corpus directory
        $input = catdir("$temp", $corpus);

        # Temporary directory
        my $dir = catdir($input, $doc, $text);

        # Write file
        $msg = write_file($dir);

        $temp = undef;
        $pool->finish(0, \$msg);
      }
      else {
        $temp = undef;
        $msg = "Unable to extract " . $dirs[$i] . "\n";
        $pool->finish(1, \$msg);
      };
    };
  }

  else {
    print "Input is neither a directory nor an archive.\n\n";

    # Without valid input there is nothing to process or benchmark
    exit 1;
  };

  $pool->wait_all_children;

  print "Done.\n";
  print timestr(timediff(Benchmark->new, $t)) . "\n\n";
}

# Unknown command
else {
  warn "Unknown command '$cmd'.\n\n";
  pod2usage(%ERROR_HASH);
}

__END__

=pod

=encoding utf8

=head1 NAME

korapxml2krill - Merge KorAP-XML data and create Krill-friendly documents


=head1 SYNOPSIS

  $ korapxml2krill [archive] -z --input <directory> --output <filename>


=head1 DESCRIPTION

L<KorAP::XML::Krill> is a library to convert KorAP-XML documents to files
compatible with the L<Krill|https://github.com/KorAP/Krill> indexer.

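As a minimal sketch (all paths are placeholders; the nested
C<< <corpus>/<doc>/<text> >> layout is the usual KorAP-XML structure),
a single text directory can be converted to a gzipped Krill JSON
document like this:

  $ korapxml2krill -z -i <corpus>/<doc>/<text>/ -o <text>.json.gz

Without C<--output>, the uncompressed result is written to C<STDOUT>.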

=head1 INSTALLATION

The preferred way to install L<KorAP::XML::Krill> is to use L<cpanm|App::cpanminus>.

  $ cpanm https://github.com/KorAP/KorAP-XML-Krill

If the installation went well, the C<korapxml2krill> command line tool will
be available.
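As a quick check that the tool was installed correctly, you can print
its version information:

  $ korapxml2krill --version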


=head1 ARGUMENTS

=over 2

=item B<archive>

Process an archive, given as a zip file or as a folder of KorAP-XML
documents (see the examples below).

=item B<extract>

Extract KorAP-XML files from a zip file.

=back

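Both commands might be invoked as follows; the archive name and the
output directories are placeholders, and the sigle format corresponds
to the C<--sigle> option described below:

  $ korapxml2krill archive -z -i corpus.zip -o index/ -j 4
  $ korapxml2krill extract -i corpus.zip -o xml/ -sg '<corpus>_<doc>.<text>'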

=head1 OPTIONS

=over 2

=item B<--input|-i> <directory|file>

Directory or archive file of documents to index.

=item B<--output|-o> <directory|file>

Output folder for archive processing or
document name for single output (optional),
writes to <STDOUT> by default.

=item B<--overwrite|-w>

Overwrite files that already exist.

=item B<--token|-t> <foundry>[#<file>]

Define the default tokenization by specifying
the name of the foundry and optionally the name
of the layer file, separated by a C<#>.
Defaults to C<OpenNLP#Tokens>.

=item B<--skip|-s> <foundry>[#<layer>]

Skip specific foundries by naming them,
or specific layers by naming the foundry
and the layer separated by a C<#>,
e.g. C<Mate#Morpho>. Alternatively you can skip C<#ALL>
and re-add single layers with C<--allow>.
Can be set multiple times.

=item B<--allow|-a> <foundry>#<layer>

Allow specific foundries and layers by naming the foundry
and the layer separated by a C<#>. Only takes effect
in combination with C<--skip '#ALL'>. Can be set multiple times.
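For example, to index only two specific layers (the layer choice here
is just an illustration):

  $ korapxml2krill -i <text-directory> -o <text>.json -s '#ALL' \
      -a DeReKo#Structure -a OpenNLP#Morpho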

=item B<--primary|-p>

Output primary data or not. Defaults to true.
Can be disabled using C<--no-primary>.

=item B<--jobs|-j>

Define the number of concurrent jobs in separate forks
for archive processing; defaults to C<0>, meaning everything
runs in the parent process. This is B<EXPERIMENTAL>!

=item B<--human|-m>

Represent the data in a human-readable way
instead of the default JSON output.

=item B<--pretty|-y>

Pretty print JSON output.

=item B<--gzip|-z>

Compress the output (expects a defined output file in single processing).

=item B<--sigle|-sg>

Extract the given text sigles
(in the form C<< <corpus>_<doc>.<text> >>).
Currently only supported on C<extract>.
Can be set multiple times.

=item B<--log|-l>

The L<Log::Log4perl> log level, defaults to C<ERROR>.

=item B<--help|-h>

Print this document.

=item B<--version|-v>

Print version information.

=back

=head1 AVAILABILITY

  https://github.com/KorAP/KorAP-XML-Krill


=head1 COPYRIGHT AND LICENSE

Copyright (C) 2015-2016, L<IDS Mannheim|http://www.ids-mannheim.de/>
Author: L<Nils Diewald|http://nils-diewald.de/>

L<KorAP::XML::Krill> is developed as part of the L<KorAP|http://korap.ids-mannheim.de/>
Corpus Analysis Platform at the
L<Institute for the German Language (IDS)|http://ids-mannheim.de/>,
member of the
L<Leibniz-Gemeinschaft|http://www.leibniz-gemeinschaft.de/en/about-us/leibniz-competition/projekte-2011/2011-funding-line-2/>.

This program is free software published under the
L<BSD-2 License|https://raw.githubusercontent.com/KorAP/KorAP-XML-Krill/master/LICENSE>.

=cut