Update to new metadata scheme
diff --git a/t/real_goethe_benchmark.t b/t/real_goethe_benchmark.t
index b70aa96..68e486a 100644
--- a/t/real_goethe_benchmark.t
+++ b/t/real_goethe_benchmark.t
@@ -3,8 +3,9 @@
 # perlbrew switch perl-blead@korap
 use strict;
 use warnings;
-use Test::More;
 use Mojo::ByteStream 'b';
+use Devel::Cycle;
+use Memory::Stats;
 
 use Benchmark qw/:hireswallclock/;
 
@@ -16,17 +17,25 @@
 use File::Basename 'dirname';
 use File::Spec::Functions 'catdir';
 
-use_ok('KorAP::Document');
+# Tokenization
+use KorAP::Tokenizer;
+use KorAP::Document;
+
+# my $stats = Memory::Stats->new;
+
+#$stats->start;
 
 # GOE/AGA/03828
-my $path = catdir(dirname(__FILE__), 'GOE/AGA/03828');
+#my $path = catdir(dirname(__FILE__), 'GOE/AGA/03828');
+my $path = catdir(dirname(__FILE__), 'BZK/D59/00089');
 # Todo: Test with absolute path!
 
-ok(my $doc = KorAP::Document->new( path => $path . '/' ), 'Load Korap::Document');
-ok($doc->parse, 'Parse document');
+# do something
+#$stats->checkpoint(sprintf("%20s", "Init"));
 
-# Tokenization
-use_ok('KorAP::Tokenizer');
+my $doc = KorAP::Document->new( path => $path . '/' );
+$doc->parse;
+# $stats->checkpoint(sprintf("%20s", "After Parsing"));
 
 my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);
 
@@ -38,34 +47,52 @@
   layer => $token_base_layer,
   name => 'tokens'
 );
-ok($tokens, 'Token Object is fine');
-ok($tokens->parse, 'Token parsing is fine');
+$tokens->parse;
+#$stats->checkpoint(sprintf("%20s", "After Tokenization"));
 
-ok($tokens->add('Base', 'Sentences'),        'Add base sentences');
-ok($tokens->add('Base', 'Paragraphs'),       'Add base paragraphs');
-ok($tokens->add('OpenNLP', 'Sentences'),     'Add opennlp sentences');
-ok($tokens->add('OpenNLP', 'Morpho'),        'Add opennlp morpho');
-ok($tokens->add('TreeTagger', 'Sentences'),  'Add tt sentences');
-ok($tokens->add('TreeTagger', 'Morpho'),     'Add tt morpho');
-ok($tokens->add('CoreNLP', 'NamedEntities'), 'Add corenlp ne');
-ok($tokens->add('CoreNLP', 'Sentences'),     'Add corenlp sentences');
-ok($tokens->add('CoreNLP', 'Morpho'),        'Add corenlp morpho');
-ok($tokens->add('CoreNLP', 'Constituency'),  'Add corenlp constituency');
-ok($tokens->add('Glemm', 'Morpho'),          'Add glemm morpho');
+$tokens->add('Base', 'Sentences');
+#$stats->checkpoint(sprintf("%20s", "After Base/Sentences"));
+
+$tokens->add('Base', 'Paragraphs');
+#$stats->checkpoint(sprintf("%20s", "After Base/Paragraphs"));
+
+$tokens->add('OpenNLP', 'Sentences');
+#$stats->checkpoint(sprintf("%20s", "After OpenNLP/Sentences"));
+
+$tokens->add('OpenNLP', 'Morpho');
+#$stats->checkpoint(sprintf("%20s", "After OpenNLP/Morpho"));
+
+$tokens->add('TreeTagger', 'Sentences');
+#$stats->checkpoint(sprintf("%20s", "After TT/Sentences"));
+
+$tokens->add('TreeTagger', 'Morpho');
+#$stats->checkpoint(sprintf("%20s", "After TT/Morpho"));
+
+$tokens->add('CoreNLP', 'Sentences');
+#$stats->checkpoint(sprintf("%20s", "After CoreNLP/Sentences"));
+
+$tokens->add('CoreNLP', 'Constituency');
+#$stats->checkpoint(sprintf("%20s", "After CoreNLP/Constituency"));
+
+#$stats->stop;
+#$stats->report;
+
+$tokens->add('CoreNLP', 'NamedEntities');
+$tokens->add('CoreNLP', 'Morpho');
+$tokens->add('Glemm', 'Morpho');
 # t ok($tokens->add('Connexor', 'Sentences'),    'Add cnx sentences');
 # t ok($tokens->add('Connexor', 'Morpho'),       'Add cnx morpho');
 # t ok($tokens->add('Connexor', 'Phrase'),       'Add cnx phrase');
 # t ok($tokens->add('Connexor', 'Syntax'),       'Add cnx syntax');
-ok($tokens->add('Mate', 'Morpho'),           'Add mate morpho');
+$tokens->add('Mate', 'Morpho');
 # $tokens->add('Mate', 'Dependency');
 # t ok($tokens->add('XIP', 'Sentences'),         'Add xip sentences');
 # t ok($tokens->add('XIP', 'Morpho'),            'Add xip morpho');
 # t ok($tokens->add('XIP', 'Constituency'),      'Add xip constituency');
 # $tokens->add('XIP', 'Dependency');
-ok($tokens->to_json, 'To json');
+# ok($tokens->to_json, 'To json');
 
-is($tokens->doc->to_hash->{title}, 'Autobiographische Einzelheiten');
+#b($tokens->to_json)->spurt('AGA-03828.json');
+b($tokens->to_json)->spurt('D59-00089.json');
 
-b($tokens->to_json)->spurt('AGA-03828.json');
-
-diag timestr(timediff(Benchmark->new, $t));
+# timestr(timediff(Benchmark->new, $t));
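
Note on the instrumentation: the Memory::Stats checkpoints and the Benchmark timing line are committed in commented-out form above. Below is a minimal standalone sketch (not part of the patch) of how the three modules pulled in by this change (Benchmark with :hireswallclock, Memory::Stats, Devel::Cycle) fit together; the toy corpus workload and the find_cycle() target are assumptions that merely stand in for the KorAP::Document / KorAP::Tokenizer steps, while the checkpoint labels mirror the ones used in the diff.

    #!/usr/bin/env perl
    use strict;
    use warnings;

    use Benchmark qw/:hireswallclock/;  # hi-res wall-clock timing, as in the script above
    use Memory::Stats;                  # memory deltas between named checkpoints
    use Devel::Cycle;                   # find reference cycles that keep memory alive

    my $t     = Benchmark->new;
    my $stats = Memory::Stats->new;
    $stats->start;

    # Toy workload standing in for $doc->parse
    my @corpus;
    push @corpus, { id => $_, text => 'x' x 1024 } for 1 .. 1000;
    $stats->checkpoint(sprintf('%20s', 'After Parsing'));

    # Toy workload standing in for $tokens->parse and $tokens->add(...)
    $_->{tokens} = [ split //, $_->{text} ] for @corpus;
    $stats->checkpoint(sprintf('%20s', 'After Tokenization'));

    # A deliberate reference cycle; find_cycle() prints every cycle it finds,
    # which is how Devel::Cycle would be pointed at $doc or $tokens to hunt leaks.
    $corpus[0]->{self} = $corpus[0];
    find_cycle(\@corpus);

    $stats->stop;
    $stats->report;   # one line per checkpoint with the memory consumed in between

    print timestr(timediff(Benchmark->new, $t)), "\n";

The pattern is the same as in the benchmark: start the stats object before the first expensive step, drop a checkpoint after every annotation layer, and print the report plus the wall-clock delta at the end, so each layer's memory and time cost can be read off separately.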