#!/usr/bin/env perl
use strict;
use warnings;

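# Tokenize the UD de_gsd training corpus with a range of tokenizers
# and compare each output against the gold tokenization.
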
# Comparison path
my $cmd = '/euralex/corpus/empirist_gold_cmc/tools/compare_tokenization.perl';

# Output path
my $ud_path = '/euralex/ud_tokens';
mkdir $ud_path;

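# UD German GSD training corpus in CoNLL-U format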
my $base = 'de_gsd-ud-train.conllu';

# Split files
chdir '/euralex/corpus/';
system 'perl /euralex/benchmarks/cleanup/split_conllu.pl /euralex/corpus/' . $base;
chdir '/euralex';

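# The splitter emits the gold tokenization (.split) and the
# raw text input (.raw) for the tokenizers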
my $gold = '/euralex/corpus/' . $base . '.split';
my $raw = '/euralex/corpus/' . $base . '.raw';

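# Each tool maps to a sub that tokenizes the raw file and writes
# the result to the tool's own output folder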
my %tools = (
  waste => sub {
    system 'cat ' . $raw . ' | waste -N -v0 --rcfile=./Waste/waste.rc > ' . $ud_path . '/waste/' . $base;
  },
  datok => sub {
    system 'cat ' . $raw . ' | ./Datok/datok tokenize -t ./Datok/testdata/tokenizer.matok - > ' . $ud_path . '/datok/' . $base;
  },
  korap_tokenizer => sub {
    system 'cat ' . $raw . ' | java -jar ./KorAP-Tokenizer/KorAP-Tokenizer.jar -l de > ' . $ud_path . '/korap_tokenizer/' . $base;
  },
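  # OpenNLP prints space-separated tokens; sed breaks them into one token per line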
  opennlp_simple => sub {
    system 'cat ' . $raw . ' | ./opennlp/bin/opennlp SimpleTokenizer 2> /dev/null | sed "s/\s/\n/g" > ' . $ud_path . '/opennlp_simple/' . $base;
  },
  opennlp_tokenizer => sub {
    system 'cat ' . $raw . ' | ./opennlp/bin/opennlp TokenizerME ./opennlp/models/opennlp-de-ud-gsd-tokens-1.0-1.9.3.bin 2> /dev/null | sed "s/\s/\n/g" > ' . $ud_path . '/opennlp_tokenizer/' . $base;
  },
  tree_tagger => sub {
    system 'cat ' . $raw . ' | perl ./treetagger/cmd/utf8-tokenize.perl -a ./treetagger/lib/german-abbreviations 2> /dev/null > ' . $ud_path . '/tree_tagger/' . $base;
  },
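  # JTok emits annotated output; grep and the inline perl filter reduce
  # it to bare token strings, one per line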
  jtok => sub {
    chdir '/euralex/JTok/bin';
    system 'sh tokenize ' . $raw . ' de | grep "Token: " | perl -CS -pe "s/\s+Token: \"//; s/^(\"?[^\"]*?)\".+?$/\1/g" > ' . $ud_path . '/jtok/' . $base;
    chdir '/euralex';
  },
  syntok => sub {
    system 'python3 -m syntok.tokenizer ' . $raw . ' | sed "s/\s/\n/g" > ' . $ud_path . '/syntok/' . $base;
  },
  elephant => sub {
    system './elephant-wrapper/bin/tokenize.sh -i ' . $raw . ' UD_German | sed "s/\s/\n/g" > ' . $ud_path . '/elephant/' . $base;
  },
  spacy => sub {
    system 'python3 ./spacy/spacy_tok.py ' . $raw . ' > ' . $ud_path . '/spacy/' . $base;
  },
  somajo => sub {
    system 'somajo-tokenizer ' . $raw . ' 2> /dev/null > ' . $ud_path . '/somajo/' . $base;
  },
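  # CoreNLP writes its result to <input>.raw.out; a cleanup script
  # extracts the tokens before the temporary file is removed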
  stanford => sub {
    system 'CLASSPATH=/euralex/stanford-corenlp-4.4.0/* java edu.stanford.nlp.pipeline.StanfordCoreNLP ' .
      '-props german -annotators tokenize,ssplit,mwt -tokenize.language=german -file ' . $raw . ' 2> /dev/null';
    system 'perl /euralex/benchmarks/cleanup/stanford.pl ' . $base . '.raw.out > ' . $ud_path . '/stanford/' . $base;
    system 'rm ' . $base . '.raw.out';
  }
);

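# Skip tools already processed; only elephant and spacy remain active in this run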
delete $tools{waste};
delete $tools{datok};
delete $tools{korap_tokenizer};
delete $tools{opennlp_simple};
delete $tools{opennlp_tokenizer};
delete $tools{tree_tagger};
delete $tools{jtok};
delete $tools{syntok};
delete $tools{somajo};
delete $tools{stanford};
# delete $tools{elephant};
# delete $tools{spacy};

# Create project folders
foreach (keys %tools) {
  mkdir $ud_path . '/' . $_;
};

# Run tokenization
foreach (keys %tools) {
  $tools{$_}->();
};

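# Compare each tool's output against the gold standard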
foreach my $tool (keys %tools) {
  print "\n##########\n";
  print "##### $tool - UD\n";
  print "##\n";
  system $cmd . ' -x ' . $gold . ' ' . $ud_path . '/' . $tool . '/' . $base . ' 2> /dev/null';
};