Upgrade repo to latest version
diff --git a/.gitignore b/.gitignore
index df93758..1d77f02 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+outputs/
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/DeReKo/all_dereko_filenames.txt b/DeReKo/all_dereko_filenames.txt
new file mode 100644
index 0000000..5d9940f
--- /dev/null
+++ b/DeReKo/all_dereko_filenames.txt
@@ -0,0 +1,3173 @@
+a00
+a01
+a07
+a08
+a09
+a10
+a11
+a12
+a13
+a14
+a15
+a16
+a17
+a18
+a19
+a97
+a98
+a99
+aan04
+aan05
+aan06
+aan07
+aan08
+aan09
+aan10
+aan11
+aan12
+aan13
+aan14
+aan15
+aan16
+aan17
+aan18
+aan19
+aaz03
+aaz04
+aaz05
+aaz06
+aaz07
+aaz08
+aaz09
+aaz10
+aaz11
+aaz12
+aaz13
+aaz14
+aaz15
+aaz16
+aaz17
+aaz18
+aaz19
+abm17
+abm18
+abm19
+abo02
+abo03
+abo04
+abo05
+abo06
+abo07
+abo08
+abo09
+abo10
+abo11
+abo12
+abo13
+abo14
+abo15
+abo16
+abo17
+abo18
+abo19
+aez17
+aez18
+aez19
+afz17
+afz18
+afz19
+agz17
+agz18
+agz19
+aho17
+aho18
+aho19
+alz15
+alz16
+alz17
+alz18
+alz19
+art15
+art16
+art17
+art18
+art19
+auf17
+auf18
+auf19
+auh17
+auh18
+auh19
+azm05
+azm06
+azm07
+azm08
+azm09
+azm10
+azm11
+azm12
+azm13
+azm14
+azm15
+azm16
+azm17
+azm18
+azm19
+b09
+b10
+b11
+b12
+b13
+b14
+b15
+b16
+b17
+b18
+b19
+baz00
+baz01
+baz02
+baz03
+baz04
+baz05
+baz06
+baz07
+baz08
+baz09
+baz10
+baz11
+baz12
+baz13
+baz14
+baz15
+baz16
+baz17
+baz18
+baz19
+bdz03
+bdz04
+bdz05
+bdz06
+bdz07
+bdz08
+bdz09
+bdz10
+bdz11
+bdz12
+bdz13
+bdz14
+bdz15
+bdz16
+bdz17
+bdz18
+bdz19
+bee15
+bee16
+bee17
+bee18
+bee19
+bei17
+bei18
+bei19
+beo02
+beo03
+beo04
+beo05
+beo06
+beo07
+beo08
+beo09
+beo10
+beo11
+beo12
+beo13
+beo14
+beo15
+beo18
+beo19
+bez01
+bez02
+bez03
+bez04
+bez05
+bez06
+bez07
+bez08
+bez09
+bez10
+bez11
+bez12
+bez13
+bez18
+bez19
+bil17
+bil18
+bil19
+bio-pub
+bkr12
+bkr13
+bkr14
+bkr15
+bkr16
+bkr17
+bkr18
+bkr19
+bku00
+bku01
+bku02
+bku03
+bku04
+bku05
+bku06
+bku07
+bku08
+bku09
+bku10
+bku11
+bku12
+bku13
+bku14
+bku15
+bku16
+bku17
+bku18
+bku19
+bla18
+bla19
+bli04
+bli05
+bli06
+bli07
+bli08
+bli09
+bli10
+bli11
+bli12
+bli13
+bli14
+bli15
+bli16
+bli17
+bli18
+bli19
+bmo13
+bmo14
+bmo15
+bmo16
+bmo17
+bmo18
+bmo19
+bna12
+bna13
+bna14
+bna15
+bna16
+bna17
+bna18
+bna19
+bot17
+bot18
+bot19
+boz00
+boz01
+boz02
+boz03
+boz04
+boz05
+boz06
+boz07
+boz08
+boz09
+boz10
+boz11
+boz12
+boz13
+boz14
+boz15
+boz16
+boz17
+boz18
+boz19
+brg09
+brg10
+brg11
+brg12
+brg13
+brg14
+brg15
+brg16
+brg17
+brg18
+brg19
+brm18
+brm19
+bru09
+bru10
+bru11
+bru12
+bru13
+bru14
+bru15
+bru16
+bru17
+bru18
+bru19
+brw15
+brw16
+brw17
+brw18
+brw19
+brz05
+brz06
+brz07
+brz08
+brz09
+brz10
+brz11
+brz12
+brz13
+bsz04
+bsz05
+bsz06
+bsz07
+bsz08
+bsz09
+bsz10
+bsz11
+bsz12
+bsz13
+bsz14
+bsz15
+bsz16
+bsz17
+bue06
+bue07
+bue08
+bue09
+bue10
+bue11
+bue12
+bue13
+bue14
+bue15
+bue16
+bue17
+bue18
+bue19
+bun00
+bun04
+bun05
+bun06
+bun07
+bun08
+bun09
+bun10
+bun11
+bun12
+bun13
+bun14
+bun15
+bun16
+bun17
+bun18
+bun19
+bup18
+bup19
+bvz07
+bvz08
+bvz09
+bvz10
+bvz11
+bvz12
+bvz13
+bvz14
+bvz15
+bwa13
+bwa14
+bwa15
+bwa16
+bwa17
+bwa18
+bwa19
+bze00
+bze01
+bze02
+bze03
+bze04
+bze05
+bze06
+bze07
+bze08
+bze09
+bze10
+bze11
+bze12
+bze13
+bze14
+bze15
+bze16
+bze17
+bze18
+bze19
+bzg11
+bzg12
+bzg13
+bzg14
+bzg15
+bzg16
+bzg17
+bzg18
+bzg19
+c93
+c94
+c95
+c96
+c97
+c98
+cap17
+cap18
+cap19
+chk15
+chk16
+chk17
+chk18
+chk19
+cid17
+cid18
+cid19
+cio17
+cio18
+cio19
+cit17
+cit18
+cit19
+cou15
+cou16
+cou17
+cou18
+cou19
+ct17
+ct18
+ct19
+ctb09
+ctb10
+ctb11
+ctb12
+ctb13
+ctb14
+ctb15
+ctb16
+ctb17
+ctb18
+ctb19
+dae17
+dae18
+dae19
+dak15
+dak16
+dak17
+dak18
+dak19
+daz13
+daz14
+daz15
+daz16
+daz17
+daz18
+daz19
+dbt17
+dbt18
+dbt19
+dec00
+dec01
+dec02
+dec03
+dec04
+dec05
+dec06
+dec07
+dec08
+dec09
+dec10
+dec11
+dec12
+dec13
+dec14
+dec15
+dec16
+dec17
+dec18
+dec19
+dgl19
+dib17
+dib18
+dki11
+dki12
+dki13
+dki14
+dki15
+dki16
+dki17
+dki18
+dki19
+dnn13
+dnn14
+dnn15
+dnn16
+dnn17
+dnn18
+dnn19
+dog15
+dog16
+dog17
+dog18
+dog19
+dol00
+dol01
+dol02
+dol03
+dol04
+dol05
+dol06
+dol07
+dol08
+dol09
+dol10
+dol11
+dol12
+dol13
+dol14
+dol15
+dol16
+dol17
+dol18
+dol19
+dpr13
+dpr14
+dpr15
+dpr16
+dpr17
+dpr18
+dpr19
+dsz17
+dsz18
+dsz19
+dtz17
+dtz18
+dtz19
+dvz17
+dvz18
+dvz19
+dww17
+dww18
+dww19
+e00
+e01
+e02
+e03
+e04
+e05
+e06
+e07
+e08
+e09
+e10
+e11
+e12
+e13
+e14
+e15
+e16
+e17
+e18
+e19
+e96
+e97
+e98
+e99
+edf15
+edf16
+edf17
+edf18
+edf19
+eft15
+eft16
+eft17
+eft18
+eft19
+ein17
+ein18
+elf15
+elf16
+elf17
+elf18
+elf19
+elo17
+elo18
+elo19
+elt15
+elt16
+elt17
+elt18
+elt19
+ene17
+ene18
+ene19
+epr17
+epr18
+epr19
+erk
+ett15
+ett16
+ett17
+ett18
+ett19
+eut15
+eut16
+eut17
+eut18
+eut19
+euw17
+euw18
+euw19
+eze12
+eze13
+eze14
+eze15
+eze16
+eze17
+eze18
+eze19
+fis15
+fis16
+fis17
+fis18
+fis19
+flt00
+flt01
+flt02
+flt03
+flt04
+flt05
+flt06
+flt07
+flt08
+flt09
+flt10
+flt11
+flt12
+flt13
+flt14
+flt15
+flt16
+flt17
+flt18
+flt19
+fmt00
+fmt01
+fmt02
+fmt03
+fmt04
+fmt05
+fmt06
+fmt07
+fmt08
+fmt09
+fmt10
+fmt11
+fmt12
+fmt13
+fmt14
+fmt15
+fmt16
+fmt17
+fmt18
+fnp00
+fnp01
+fnp02
+fnp03
+fnp04
+fnp05
+fnp06
+fnp07
+fnp08
+fnp09
+fnp10
+fnp11
+fnp12
+fnp13
+fnp14
+fnp15
+fnp16
+fnp17
+fnp18
+fnp19
+foc00
+foc01
+foc02
+foc03
+foc04
+foc05
+foc06
+foc07
+foc08
+foc09
+foc10
+foc11
+foc12
+foc13
+foc14
+foc15
+foc16
+foc17
+foc18
+foc19
+fom00
+fom01
+fom02
+fom03
+fom04
+fom05
+fom06
+fom07
+fom08
+fom09
+fom10
+fom11
+fom12
+fom13
+fom14
+fom15
+fom16
+fom17
+fom18
+fom19
+fpc11
+fpc12
+fpc13
+fpc14
+fpc15
+fpc16
+fpc17
+fpc18
+fpc19
+fra18
+fra19
+frt05
+frt06
+frt07
+frt08
+frt09
+frt10
+frt11
+frt12
+frt13
+frt14
+frt15
+frt16
+frt17
+frt18
+frt19
+fsp-pub
+fuw17
+fuw18
+fuw19
+gal15
+gal16
+gal17
+gal18
+gal19
+gaz00
+gaz01
+gaz02
+gaz03
+gaz04
+gaz05
+gaz06
+gaz07
+gaz08
+gaz09
+gaz10
+gaz11
+gaz12
+gaz13
+gaz14
+gaz15
+gaz16
+gaz17
+gaz18
+gaz19
+ge00
+ge01
+ge02
+ge03
+ge04
+ge05
+ge06
+ge07
+ge08
+ge09
+ge10
+ge11
+ge12
+ge13
+ge14
+ge15
+ge16
+ge17
+ge18
+ge19
+ge99
+geo09
+geo10
+geo11
+geo12
+geo13
+geo14
+geo15
+geo16
+geo17
+geo18
+geo19
+ges15
+ges16
+ges17
+ges18
+ges19
+ggt17
+ggt18
+ggt19
+giz04
+giz05
+giz06
+giz07
+giz08
+giz09
+giz10
+giz11
+giz12
+giz13
+giz14
+giz15
+giz16
+giz17
+giz18
+giz19
+gng15
+gng16
+gng17
+gng18
+gng19
+gob15
+gob16
+gob17
+gob18
+gob19
+goe
+gr1
+gri
+gsp15
+gsp16
+gsp17
+gsp18
+gsp19
+gta15
+gta16
+gta17
+gta18
+gta19
+gtb04
+gtb05
+gtb06
+gtb07
+gtb08
+gtb09
+gtb10
+gtb11
+gtb12
+gtb13
+gtb14
+gtb15
+gtb16
+gtb17
+gwp17
+gwp18
+gwp19
+h85
+h86
+h87
+h88
+haa00
+haa01
+haa02
+haa03
+haa04
+haa05
+haa06
+haa07
+haa08
+haa09
+haa10
+haa11
+haa12
+haa13
+haa14
+haa15
+haa16
+haa17
+haa18
+haa19
+haa99
+hab00
+hab01
+hab02
+hab03
+hab04
+hab05
+hab06
+hab07
+hab08
+hab09
+hab10
+hab11
+hab12
+hab13
+hab14
+hab15
+hab16
+hab17
+hab18
+hab19
+hat13
+hat14
+hat15
+hat16
+hat17
+hat18
+hat19
+hau18
+hau19
+haz07
+haz08
+haz09
+haz10
+haz11
+haz12
+haz13
+haz14
+haz15
+haz16
+haz17
+haz18
+haz19
+hdz00
+hdz01
+hdz02
+hdz03
+hdz04
+hdz05
+hdz06
+hdz07
+hdz08
+hdz09
+hdz10
+hdz11
+hdz12
+hdz13
+hdz14
+hdz15
+hdz16
+hdz17
+hdz18
+hdz19
+hdz92
+hdz93
+hdz94
+hdz95
+hdz96
+hdz97
+hdz98
+hdz99
+hfz03
+hfz04
+hfz05
+hfz06
+hfz07
+hfz08
+hfz09
+hfz10
+hfz11
+hfz12
+hfz13
+hfz14
+hfz15
+hfz16
+hfz17
+hfz18
+hfz19
+hgz17
+hgz18
+hgz19
+hhz04
+hhz05
+hhz06
+hhz07
+hhz08
+hhz09
+hhz10
+hhz11
+hhz12
+hhz13
+hhz14
+hhz15
+hhz16
+hhz17
+hhz18
+hhz19
+hkr14
+hkr15
+hkr16
+hkr17
+hkr18
+hkr19
+hmp05
+hmp06
+hmp07
+hmp08
+hmp09
+hmp10
+hmp11
+hmp12
+hmp13
+hmp14
+hmp15
+hmp16
+hmp17
+hmp18
+hmp19
+hna19
+hor17
+hor18
+hor19
+hrz07
+hrz08
+hrz09
+hrz10
+hrz11
+hrz12
+hrz13
+hrz14
+hrz15
+hrz16
+hrz17
+hrz18
+hrz19
+hst09
+hst10
+hst11
+hst12
+hst13
+hst14
+hst15
+hst16
+hst17
+hst18
+hst19
+htb14
+htb15
+htb16
+htb17
+htb18
+htb19
+hzs18
+hzs19
+hzw18
+hzw19
+hzz14
+hzz15
+hzz16
+hzz17
+hzz18
+hzz19
+i00
+i01
+i02
+i03
+i04
+i05
+i06
+i07
+i08
+i09
+i10
+i11
+i12
+i13
+i14
+i15
+i16
+i17
+i18
+i19
+i96
+i97
+i98
+i99
+imw17
+imw18
+imw19
+imz17
+imz18
+imz19
+itb17
+itb18
+itb19
+itt17
+itt18
+itt19
+ix17
+ix18
+ix19
+jue10
+jue11
+jue12
+jue13
+jue14
+jue15
+jue16
+jue17
+jue18
+jue19
+k00
+k02
+k03
+k04
+k05
+k06
+k07
+k08
+k09
+k10
+k11
+k12
+k13
+k14
+k15
+k16
+k17
+k18
+k19
+k96
+k97
+k98
+k99
+kaz04
+kaz05
+kaz06
+kaz07
+kaz08
+kaz09
+kaz10
+kaz11
+kaz12
+kaz13
+kaz14
+kaz15
+kaz16
+kaz17
+kaz18
+kaz19
+kfz17
+kfz18
+kfz19
+khh17
+khh18
+khh19
+kic
+kir13
+kir14
+kir15
+kir16
+kir17
+kir18
+kir19
+kjl
+kn19
+kru02
+kru03
+kru04
+kru05
+kru06
+kru07
+kru08
+kru09
+kru10
+kru11
+kru12
+kru13
+kru14
+kru15
+kru16
+kru17
+kru18
+kru19
+ksa00
+ksa01
+ksa02
+ksa03
+ksa04
+ksa05
+ksa06
+ksa07
+ksa08
+ksa09
+ksa10
+ksa11
+ksa12
+ksa13
+ksa14
+ksa15
+ksa16
+ksa17
+ksa18
+ksa19
+ksp
+ktz04
+ktz05
+ktz06
+ktz07
+ktz08
+ktz09
+ktz10
+ktz11
+ktz12
+ktz13
+ktz14
+kur02
+kur03
+kur04
+kur05
+kur06
+kur07
+kur08
+kur09
+kur10
+kur11
+kur12
+kur13
+kur14
+kur15
+kur16
+kur17
+kur18
+kur19
+kxp00
+kxp02
+kxp03
+kxp04
+kxp05
+kxp06
+kxp07
+kxp08
+kxp09
+kxp10
+kxp11
+kxp12
+kxp13
+kxp14
+kxp15
+kxp16
+kxp17
+kxp18
+kxp19
+l00
+l01
+l02
+l03
+l04
+l05
+l06
+l07
+l08
+l09
+l10
+l11
+l12
+l13
+l14
+l15
+l16
+l17
+l18
+l19
+l97
+l98
+l99
+lab17
+lab18
+lab19
+lah15
+lah16
+lah17
+lah18
+lah19
+lan04
+lan05
+lan06
+lan07
+lan08
+lan09
+lan10
+lan11
+lan12
+lan13
+lan14
+lan15
+lan16
+lan17
+lan18
+lan19
+laz07
+laz08
+laz09
+laz10
+laz11
+laz12
+laz13
+laz14
+laz15
+laz16
+laz17
+laz18
+laz19
+lhz14
+lhz15
+lhz16
+lhz17
+lhz18
+lhz19
+lim
+lit12
+lit13
+lit14
+lit15
+lit16
+lit17
+lit18
+lit19
+lmd17
+lmd18
+lmd19
+lmz17
+lmz18
+lmz19
+ln19
+log17
+log18
+log19
+loz-div-pub
+loz-wam
+lru00
+lru01
+lru02
+lru03
+lru04
+lru05
+lru06
+lru07
+lru08
+lru09
+lru10
+lru11
+lru12
+lru13
+lru14
+lru15
+lru16
+lru17
+lru18
+lru19
+ltb08
+ltb09
+ltb10
+ltb11
+ltb12
+ltb13
+ltb14
+ltb15
+ltb16
+ltb17
+ltb18
+ltb19
+lvz00
+lvz01
+lvz02
+lvz03
+lvz04
+lvz05
+lvz06
+lvz07
+lvz08
+lvz09
+lvz10
+lvz11
+lvz12
+lvz13
+lvz14
+lvz15
+lvz16
+lvz17
+lvz18
+lvz19
+m00
+m01
+m02
+m03
+m04
+m05
+m06
+m07
+m08
+m09
+m10
+m11
+m12
+m13
+m14
+m15
+m16
+m17
+m18
+m19
+m89
+m91
+m94
+m95
+m96
+m97
+m98
+m99
+mag07
+mag08
+mag09
+mag10
+mag11
+mag12
+mag13
+mag14
+mag15
+mag16
+mag17
+mag18
+mag19
+maz00
+maz01
+maz02
+maz03
+maz04
+maz05
+maz06
+maz07
+maz08
+maz09
+maz10
+maz11
+maz12
+maz13
+maz14
+maz15
+maz16
+maz17
+maz18
+maz19
+mdr17
+mdr18
+mdr19
+meg
+mep12
+mep13
+mep14
+mep15
+mep16
+mep17
+mep18
+mep19
+mew
+mib14
+mib15
+mib16
+mib17
+mib18
+mib19
+mid17
+mid18
+mid19
+mld
+mme16
+mme17
+mme18
+mme19
+mpo00
+mpo01
+mpo02
+mpo03
+mpo04
+mpo05
+mpo06
+mpo07
+mpo08
+mpo09
+mpo10
+mpo11
+mpo12
+mpo13
+mpo14
+mpo15
+mpo16
+mpo17
+mpo18
+mpo19
+mpo97
+mpo98
+mpo99
+msp00
+msp01
+msp02
+msp03
+msp04
+msp05
+msp06
+msp07
+msp08
+msp09
+msp10
+msp11
+msp12
+msp13
+msp14
+msp15
+msp16
+msp17
+msp18
+msp19
+msp98
+msp99
+mt17
+mt18
+mt19
+mtk02
+mtk03
+mtk04
+mtk05
+mtk06
+mtk07
+mtk08
+mtk09
+mtk10
+mtk11
+mtk12
+mtk13
+mtk14
+mtk15
+mtk16
+mtk17
+mtk18
+mtk19
+mut17
+mut18
+mut19
+muv14
+muv15
+muv16
+muv17
+muv18
+muv19
+mwa
+mwo17
+mwo18
+mwo19
+mze00
+mze01
+mze02
+mze03
+mze04
+mze05
+mze06
+mze07
+mze08
+mze09
+mze10
+mze11
+mze12
+mze13
+mze14
+mze15
+mze16
+mze17
+mze18
+mze19
+n00
+n01
+n02
+n03
+n04
+n05
+n06
+n07
+n08
+n09
+n10
+n11
+n12
+n13
+n14
+n15
+n16
+n17
+n18
+n19
+n91
+n92
+n93
+n94
+n95
+n96
+n97
+n98
+n99
+nbk15
+nbk16
+nbk17
+nbk18
+nbk19
+ndo15
+ndo16
+ndo17
+ndo18
+ndo19
+neo15
+neo16
+neo17
+neo18
+neu07
+neu08
+neu09
+new02
+new03
+new04
+new05
+new06
+new07
+new08
+new09
+new10
+new11
+new12
+new13
+new14
+new15
+new16
+new17
+new18
+new19
+ngz13
+ngz14
+ngz15
+ngz16
+ngz17
+ngz18
+ngz19
+nku00
+nku01
+nku02
+nku03
+nku04
+nku05
+nku06
+nku07
+nku08
+nku09
+nku10
+nku11
+nku12
+nku13
+nku14
+nku15
+nku16
+nku17
+nku18
+nku19
+nlz00
+nlz01
+nlz02
+nlz03
+nlz04
+nlz05
+nlz06
+nlz07
+nlz08
+nlz09
+nlz10
+nlz11
+nlz12
+nlz13
+nlz14
+nlz15
+nlz16
+nlz17
+nlz18
+nlz19
+nnn13
+nnn14
+nnn15
+nnn16
+nnn17
+nnn18
+nnn19
+nnp14
+nnp15
+nnp16
+nnp17
+nnp18
+nnp19
+non07
+non08
+non09
+non10
+non11
+non12
+non13
+non14
+non15
+non16
+non17
+non18
+non19
+now16
+now17
+now18
+now19
+noz12
+noz13
+noz14
+noz15
+noz16
+noz17
+noz18
+noz19
+npr15
+npr16
+npr17
+npr18
+npr19
+nun00
+nun01
+nun02
+nun03
+nun04
+nun05
+nun06
+nun07
+nun08
+nun09
+nun10
+nun11
+nun12
+nun13
+nun14
+nun15
+nun16
+nun17
+nun18
+nun19
+nun90
+nun91
+nun92
+nun93
+nun94
+nun95
+nun96
+nun97
+nun98
+nun99
+nuz02
+nuz03
+nuz04
+nuz05
+nuz06
+nuz07
+nuz08
+nuz09
+nuz10
+nuz11
+nuz12
+nuz13
+nuz14
+nuz15
+nuz16
+nuz17
+nuz18
+nuz19
+nvb00
+nvb01
+nvb02
+nvb03
+nvb04
+nvb05
+nvb06
+nvb07
+nvb08
+nvb09
+nvb10
+nvb11
+nvb12
+nvb13
+nvb14
+nvb15
+nvb16
+nvb17
+nvb18
+nvb19
+nvt00
+nvt01
+nvt02
+nvt03
+nvt05
+nvt06
+nvt07
+nvt09
+nvt10
+nvt11
+nvt12
+nvt13
+nvt14
+nvt15
+nvt16
+nvt17
+nvt18
+nvt19
+nwe03
+nwe04
+nwe05
+nwe06
+nwe07
+nwe08
+nwe09
+nwe10
+nwe11
+nwe12
+nwe13
+nwe14
+nwe15
+nwe16
+nwe17
+nwe18
+nwe19
+nws00
+nws01
+nws02
+nws03
+nws04
+nws09
+nws10
+nws11
+nws12
+nws13
+nws14
+nws15
+nws16
+nws17
+nws18
+nws19
+nwt15
+nwt16
+nwt17
+nwt18
+nwt19
+nwz07
+nwz08
+nwz09
+nwz10
+nwz11
+nwz12
+nwz13
+nwz14
+nwz15
+nwz16
+nwz17
+nwz18
+nwz19
+nzf00
+nzf01
+nzf02
+nzf03
+nzf04
+nzf05
+nzf06
+nzf07
+nzf08
+nzf09
+nzf10
+nzf11
+nzf12
+nzf13
+nzf14
+nzf15
+nzf16
+nzf17
+nzf18
+nzf19
+nzf94
+nzf95
+nzf96
+nzf97
+nzf98
+nzf99
+nzs02
+nzs03
+nzs04
+nzs05
+nzs06
+nzs07
+nzs08
+nzs09
+nzs10
+nzs11
+nzs12
+nzs13
+nzs14
+nzs15
+nzs16
+nzs17
+nzs18
+nzs19
+nzz00
+nzz01
+nzz02
+nzz03
+nzz04
+nzz05
+nzz06
+nzz07
+nzz08
+nzz09
+nzz10
+nzz11
+nzz12
+nzz13
+nzz14
+nzz15
+nzz16
+nzz17
+nzz18
+nzz19
+o00
+o02
+o03
+o04
+o05
+o06
+o07
+o08
+o09
+o10
+o11
+o12
+o13
+o14
+o15
+o16
+o17
+o18
+o19
+o94
+o95
+o96
+o97
+o98
+o99
+oaz13
+oaz14
+oaz15
+oaz16
+oaz17
+oaz18
+oaz19
+ohz07
+ohz08
+ohz09
+ohz10
+ohz11
+ohz12
+ohz13
+ohz14
+ohz15
+ohz16
+ohz17
+ohz18
+ohz19
+osz16
+osz17
+osz18
+osz19
+otz00
+otz01
+otz02
+otz03
+otz04
+otz05
+otz06
+otz07
+otz08
+otz09
+otz10
+otz11
+otz12
+otz17
+otz18
+otz19
+ovz13
+ovz14
+ovz15
+ovz16
+ovz17
+ovz18
+ovz19
+p00
+p02
+p03
+p04
+p05
+p06
+p07
+p08
+p09
+p10
+p11
+p12
+p13
+p14
+p15
+p16
+p17
+p18
+p19
+p91
+p92
+p93
+p94
+p95
+p96
+p97
+p98
+p99
+paz15
+paz16
+paz17
+paz18
+paz19
+pco17
+pco18
+pco19
+ph05
+ph06
+ph07
+ph08
+ph09
+ph10
+ph11
+pha17
+pha18
+pha19
+pmm15
+pmm16
+pmm17
+pmm18
+pmm19
+pnn05
+pnn06
+pnn07
+pnn08
+pnn09
+pnn10
+pnn11
+pnn12
+pnn13
+pnn14
+pnn15
+pnn16
+pnn17
+pnn18
+pnn19
+pnp01
+pnp02
+pnp03
+pnp04
+pnp05
+pnp06
+pnp07
+pnp08
+pnp09
+pnp10
+pnp11
+pnp12
+pnp13
+pnp14
+pnp15
+pnp16
+pnp17
+pnp18
+pnp19
+pp-bb
+pp-be
+pp-br
+pp-bt
+pp-bw
+pp-by
+pp-hb
+pp-he
+pp-hh
+pp-mv
+pp-ni
+pp-no
+pp-nw
+pp-rp
+pp-sh
+pp-sl
+pp-sn
+pp-st
+pp-th
+prf00
+prf01
+prf02
+prf03
+prf04
+prf05
+prf06
+prf07
+prf08
+prf09
+prf10
+prf11
+prf12
+prf13
+prf14
+prf15
+prf16
+prf17
+prf18
+prf19
+r00
+r01
+r02
+r03
+r04
+r05
+r06
+r07
+r08
+r09
+r10
+r11
+r12
+r13
+r14
+r15
+r16
+r97
+r98
+r99
+rbs17
+rbs18
+rbs19
+rga07
+rga08
+rga09
+rga10
+rga11
+rga12
+rga13
+rga14
+rga15
+rga16
+rga17
+rga18
+rga19
+rhz00
+rhz01
+rhz02
+rhz03
+rhz04
+rhz05
+rhz06
+rhz07
+rhz08
+rhz09
+rhz10
+rhz11
+rhz12
+rhz13
+rhz14
+rhz15
+rhz16
+rhz17
+rhz18
+rhz19
+rhz96
+rhz97
+rhz98
+rhz99
+rlh17
+rlh18
+rlh19
+rln14
+rln15
+rln16
+rln17
+rln18
+rln19
+rn19
+rpo01
+rpo02
+rpo03
+rpo04
+rpo05
+rpo06
+rpo07
+rpo08
+rpo09
+rpo10
+rpo11
+rpo12
+rpo13
+rpo14
+rpo15
+rpo16
+rpo17
+rpo18
+rpo19
+rsw14
+rsw15
+rsw16
+rsw17
+rsw18
+rsw19
+rue15
+rue16
+rue17
+rue18
+rue19
+rvz13
+rvz14
+rvz15
+rvz16
+rvz17
+rvz18
+rvz19
+s00
+s01
+s02
+s03
+s04
+s05
+s06
+s07
+s08
+s09
+s10
+s11
+s12
+s13
+s14
+s15
+s16
+s17
+s18
+s19
+s47
+s48
+s49
+s50
+s51
+s52
+s53
+s54
+s55
+s56
+s57
+s58
+s59
+s60
+s61
+s62
+s63
+s64
+s65
+s66
+s67
+s68
+s69
+s70
+s71
+s72
+s73
+s74
+s75
+s76
+s77
+s78
+s79
+s80
+s81
+s82
+s83
+s84
+s85
+s86
+s87
+s88
+s89
+s90
+s91
+s92
+s93
+s94
+s95
+s96
+s97
+s98
+s99
+saz11
+saz12
+saz13
+saz14
+saz15
+saz16
+saz17
+saz18
+saz19
+sbl01
+sbl05
+sbl06
+sbl07
+sbl08
+sbl09
+sbl10
+sbl11
+sbl12
+sbl13
+sbl14
+sbl15
+sbl16
+sbl17
+sbl18
+sbl19
+sbz00
+sbz01
+sbz02
+sbz03
+sbz04
+sbz05
+sbz06
+sbz07
+sbz08
+sbz09
+sbz10
+sbz11
+sbz12
+sbz13
+sbz14
+sbz15
+sbz16
+sbz17
+sbz18
+sbz19
+sch17
+sch18
+sch19
+sct13
+sct14
+sct15
+sct16
+sct17
+sct18
+sct19
+scv17
+scv18
+scv19
+scw15
+scw16
+scw17
+scw18
+scw19
+scz11
+scz12
+scz13
+scz14
+scz15
+scz16
+scz17
+scz18
+scz19
+sid
+sku00
+sku01
+sku02
+sku03
+sku04
+sku05
+sku06
+sku07
+sku08
+sku09
+sku10
+sku11
+sku12
+sku13
+sku14
+sku15
+sku16
+sku17
+sku18
+sku19
+smp13
+smp14
+smp15
+smp16
+smp17
+smp18
+smp19
+soa04
+soa05
+soa06
+soa07
+soa08
+soa09
+soa10
+soa11
+soa12
+soa14
+soa15
+soa16
+sol00
+sol01
+sol02
+sol03
+sol04
+sol05
+sol06
+sol07
+sol08
+sol09
+sol10
+sol11
+sol12
+sol13
+sol14
+sol15
+sol16
+sol17
+sol18
+sol19
+sol99
+soz05
+soz06
+soz07
+soz08
+soz09
+soz10
+soz11
+soz12
+soz13
+soz14
+soz15
+soz16
+soz17
+soz18
+spk
+spz17
+spz18
+spz19
+sta00
+sta01
+sta03
+sta04
+sta05
+sta06
+sta07
+sta08
+sta09
+sta10
+sta11
+sta12
+sta13
+sta14
+sta15
+sta16
+sta17
+sta18
+sta19
+stb14
+stb15
+stb16
+stb17
+stb18
+stb19
+ste00
+ste01
+ste02
+ste03
+ste04
+ste05
+ste06
+ste07
+ste08
+ste09
+ste10
+ste11
+ste12
+ste13
+ste14
+ste15
+ste16
+ste17
+ste18
+ste19
+ste96
+ste97
+ste98
+ste99
+stg18
+stg19
+stn04
+stn05
+stn06
+stn07
+stn08
+stn09
+stn10
+stn11
+stn12
+stn13
+stn14
+stn15
+stn16
+stz00
+stz01
+stz02
+stz03
+stz04
+stz05
+stz06
+stz07
+stz08
+stz09
+stz10
+stz11
+stz12
+stz13
+stz14
+stz15
+stz16
+svz04
+svz05
+svz06
+svz07
+svz08
+svz09
+svz10
+svz11
+svz12
+svz13
+svz14
+svz15
+svz16
+svz17
+svz18
+svz19
+swp07
+swp08
+swp09
+swp10
+swp11
+swp12
+swp13
+swp14
+swp15
+swp16
+swp17
+swp18
+swp19
+sze00
+sze01
+sze02
+sze03
+sze04
+sze05
+sze06
+sze07
+sze08
+sze09
+sze10
+sze11
+sze12
+sze13
+sze14
+sze15
+sze16
+sze17
+sze18
+sze19
+t00
+t01
+t02
+t03
+t04
+t05
+t06
+t07
+t08
+t09
+t10
+t11
+t12
+t13
+t14
+t15
+t16
+t17
+t18
+t19
+tai17
+tai18
+tai19
+tas00
+tas01
+tas02
+tas03
+tas04
+tas05
+tas06
+tas07
+tas08
+tas09
+tas10
+tas11
+tas12
+tas13
+tas14
+tas15
+tas16
+tas17
+tas18
+tas19
+tbz14
+tbz15
+tet17
+tet18
+tet19
+tew17
+tew18
+tew19
+tha00
+tha01
+tha02
+tha03
+tha04
+tha05
+tha06
+tha07
+tha08
+tha09
+tha10
+tha11
+tha12
+tha17
+tha18
+tha19
+thb17
+thb18
+thb19
+tlz00
+tlz01
+tlz02
+tlz03
+tlz04
+tlz05
+tlz06
+tlz07
+tlz08
+tlz09
+tlz10
+tlz11
+tlz12
+tlz17
+tlz18
+tlz19
+tnz14
+tnz15
+tnz16
+tnz17
+tnz18
+tnz19
+toz13
+toz14
+toz15
+tre17
+tre18
+tre19
+tru17
+tru18
+tru19
+tsp00
+tsp01
+tsp02
+tsp03
+tsp04
+tsp05
+tsp06
+tsp07
+tsp08
+tsp09
+tsp10
+tsp11
+tsp12
+tsp13
+tsp14
+tsp15
+tsp16
+tsp17
+tsp18
+tsp19
+tvd15
+tvd16
+tvd17
+tvd18
+tvd19
+tvf00
+tvf01
+tvf02
+tvf03
+tvf04
+tvf05
+tvf06
+tvf07
+tvf08
+tvf09
+tvf10
+tvf11
+tvf12
+tvf13
+tvf14
+tvf15
+tvf16
+tvf17
+tvf18
+tvf19
+u00
+u01
+u02
+u03
+u04
+u05
+u06
+u07
+u08
+u09
+u10
+u11
+u12
+u13
+u14
+u15
+u16
+u17
+u18
+u19
+u92
+u93
+u94
+u95
+u96
+u97
+u98
+u99
+uan04
+uan05
+uan06
+uan07
+uan08
+uan09
+uan10
+uan11
+uan12
+uan13
+uan14
+uan15
+uan16
+uan17
+uan18
+uan19
+v00
+v02
+v03
+v04
+v05
+v06
+v07
+v08
+v09
+v10
+v11
+v12
+v13
+v14
+v15
+v16
+v17
+v18
+v19
+v97
+v98
+v99
+vbw13
+vbw14
+vbw15
+vbw16
+vbw17
+vbw18
+vbw19
+vdi06
+vdi07
+vdi08
+vdi09
+vdi10
+vdi11
+vdi12
+vdi13
+vdi14
+vdi15
+vdi16
+vdi17
+vdi18
+vdi19
+vru17
+vru18
+vru19
+vsw17
+vsw18
+vsw19
+vzs13
+vzs14
+vzs15
+vzs16
+vzs17
+vzs18
+vzs19
+w00
+w01
+w02
+w03
+w04
+w05
+w06
+w07
+w08
+w09
+w10
+w11
+w12
+w13
+w14
+w15
+w16
+w17
+w18
+w19
+w99
+was00
+was01
+was02
+was03
+was04
+was05
+was06
+was07
+was08
+was09
+was10
+was11
+was12
+was13
+was14
+was15
+was16
+was17
+was18
+was19
+was97
+was98
+was99
+wbl00
+wbl01
+wbl02
+wbl03
+wbl04
+wbl05
+wbl06
+wbl07
+wbl08
+wbl09
+wbl10
+wbl11
+wbl12
+wbl13
+wbl14
+wbl15
+wbl16
+weo09
+weo10
+weo11
+weo12
+weo13
+weo14
+weo15
+weo16
+weo17
+weo18
+weo19
+wez09
+wez10
+wez11
+wez12
+wez13
+wez14
+wez15
+wez16
+wez17
+wez18
+wez19
+wfb10
+wfb11
+wfb12
+wfb13
+wfb14
+wfb15
+wfb18
+wfb19
+wiz00
+wiz01
+wiz02
+wiz03
+wiz04
+wiz05
+wiz06
+wiz07
+wiz08
+wiz09
+wiz10
+wiz11
+wiz12
+wiz13
+wiz14
+wiz15
+wiz16
+wiz17
+wiz18
+wiz19
+wkb
+wkd
+wku00
+wku01
+wku03
+wku04
+wku05
+wku06
+wku07
+wku08
+wku09
+wku10
+wku11
+wku12
+wku13
+wku14
+wku15
+wku16
+wku17
+wku18
+wku19
+wor02
+wor03
+wor04
+wor05
+wor06
+wor07
+wor08
+wor09
+wor10
+wor11
+wor12
+wor13
+wor14
+wor15
+wor16
+wor17
+wor18
+wor19
+wtb02
+wtb03
+wtb04
+wtb05
+wtb06
+wtb07
+wtb08
+wtb09
+wtb10
+wtb11
+wtb12
+wtb13
+wtb14
+wtb15
+wtb16
+wtb17
+wtb18
+wtb19
+wwo05
+wwo06
+wwo07
+wwo08
+wwo09
+wwo10
+wwo11
+wwo12
+wwo13
+wwo14
+wwo15
+wwo16
+wwo17
+wwo18
+wwo19
+x00
+x01
+x02
+x03
+x04
+x08
+x09
+x10
+x11
+x12
+x13
+x14
+x15
+x16
+x17
+x18
+x19
+x96
+x97
+x98
+x99
+z00
+z01
+z02
+z03
+z04
+z05
+z06
+z07
+z08
+z09
+z10
+z11
+z12
+z13
+z14
+z15
+z16
+z17
+z18
+z19
+z53
+z54
+z55
+z56
+z57
+z58
+z59
+z60
+z61
+z62
+z63
+z64
+z65
+z66
+z67
+z68
+z69
+z70
+z71
+z72
+z73
+z74
+z75
+z76
+z77
+z78
+z79
+z80
+z81
+z82
+z83
+z84
+z85
+z86
+z87
+z88
+z89
+z90
+z91
+z92
+z93
+z94
+z95
+z96
+z97
+z98
+z99
+zca09
+zca10
+zca11
+zca12
+zca13
+zca14
+zca15
+zca16
+zca17
+zca18
+zca19
+zcw15
+zcw16
+zcw17
+zcw18
+zcw19
+zge10
+zge11
+zge12
+zge13
+zge14
+zge15
+zge16
+zge17
+zge18
+zge19
+zwi09
+zwi10
+zwi11
+zwi12
+zwi13
+zwi14
+zwi15
+zwi17
+zwi18
+zwi19
diff --git a/DeReKo/cheche.txt b/DeReKo/cheche.txt
new file mode 100644
index 0000000..26c4090
--- /dev/null
+++ b/DeReKo/cheche.txt
@@ -0,0 +1,11 @@
+a00
+a01
+a07
+a08
+a09
+a10
+zwi10
+zwi11
+zwi12
+zwi13
+zwi14
\ No newline at end of file
diff --git a/DeReKo/exec_dereko_parallel.sh b/DeReKo/exec_dereko_parallel.sh
new file mode 100644
index 0000000..13ca557
--- /dev/null
+++ b/DeReKo/exec_dereko_parallel.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+source ~/.bashrc
+
+ROOT_DATAPATH=/export/netapp/kupietz/N-GRAMM-STUDIE/conllu
+FILES_TO_PROCESS=DeReKo/all_dereko_filenames.txt
+
+while IFS= read -r line
+do
+  my_files+=($line)
+done < ${FILES_TO_PROCESS}
+
+let files_len=${#my_files[@]}+10
+
+for (( i=0; i<=$files_len; i+=10 ))
+  do 
+  	let next=i
+	file_1=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_2=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_3=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_4=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_5=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_6=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_7=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_8=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_9=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	let next=next+1
+	file_10=${my_files[next]}
+	python systems/parse_spacy.py --corpus_name DeReKo_${my_files[next]} --comment_str "#" \
+	  -i ${ROOT_DATAPATH}/${my_files[next]}.conllu.gz \
+	  -o ${ROOT_DATAPATH}/0_SpaCyParsed/${my_files[next]}.spacy.gl.conllu &
+	wait
+	# Remove Original Uncompressed Files
+	rm ${ROOT_DATAPATH}/${file_1}.conllu
+	rm ${ROOT_DATAPATH}/${file_2}.conllu
+	rm ${ROOT_DATAPATH}/${file_3}.conllu
+	rm ${ROOT_DATAPATH}/${file_4}.conllu
+	rm ${ROOT_DATAPATH}/${file_5}.conllu
+	rm ${ROOT_DATAPATH}/${file_6}.conllu
+	rm ${ROOT_DATAPATH}/${file_7}.conllu
+	rm ${ROOT_DATAPATH}/${file_8}.conllu
+	rm ${ROOT_DATAPATH}/${file_9}.conllu
+	rm ${ROOT_DATAPATH}/${file_10}.conllu
+	# ZIP The generated Parsed Outputs
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_1}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_2}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_3}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_4}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_5}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_6}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_7}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_8}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_9}.spacy.gl.conllu &
+	gzip ${ROOT_DATAPATH}/0_SpaCyParsed/${file_10}.spacy.gl.conllu &
+	wait
+  done
+  
+  
+  
diff --git a/DeReKo/tutorial_examples/explore_dereko.py b/DeReKo/explore_dereko.py
similarity index 78%
rename from DeReKo/tutorial_examples/explore_dereko.py
rename to DeReKo/explore_dereko.py
index fcf4915..c5ddc49 100644
--- a/DeReKo/tutorial_examples/explore_dereko.py
+++ b/DeReKo/explore_dereko.py
@@ -17,5 +17,7 @@
         prefix = fn.split(".")[0]
         prefix = re.findall("\D+", prefix)[0]
         file_groups[prefix].append(fn)
-    for group,files in sorted(file_groups.items(), key=lambda x: len(x[1]), reverse=True):
-        print(group, len(files))
\ No newline at end of file
+        print(fn.split(".")[0])
+    
+    #for group,files in sorted(file_groups.items(), key=lambda x: len(x[1]), reverse=True):
+    #    print(group, len(files))
\ No newline at end of file
diff --git a/DeReKo/spacy_train/basic_config_allOrth.cfg b/DeReKo/spacy_train/basic_config_allOrth.cfg
new file mode 100644
index 0000000..2b27744
--- /dev/null
+++ b/DeReKo/spacy_train/basic_config_allOrth.cfg
@@ -0,0 +1,81 @@
+[paths]
+train = "/vol/netapp/daza/datasets/TIGER_conll/data_splits/train/Tiger.ALL.Orth.train.spacy"
+dev = "/vol/netapp/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.spacy"
+
+[system]
+gpu_allocator = "pytorch"
+
+
+[nlp]
+lang = "de"
+pipeline = ["transformer", "tagger"]
+tokenizer = {"@tokenizers": "spacy.Tokenizer.v1"}
+
+[components]
+
+[components.transformer]
+factory = "transformer"
+
+[components.transformer.model]
+@architectures = "spacy-transformers.TransformerModel.v1"
+name = "bert-base-german-cased"
+tokenizer_config = {"use_fast": true}
+
+[components.transformer.model.get_spans]
+@span_getters = "spacy-transformers.strided_spans.v1"
+window = 128
+stride = 96
+
+
+[components.tagger]
+factory = "tagger"
+
+[components.tagger.model]
+@architectures = "spacy.Tagger.v1"
+nO = null
+
+[components.tagger.model.tok2vec]
+@architectures = "spacy-transformers.TransformerListener.v1"
+grad_factor = 1.0
+
+[components.tagger.model.tok2vec.pooling]
+@layers = "reduce_mean.v1"
+
+
+
+[corpora]
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 500
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+
+[training]
+accumulate_gradient = 3
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+
+[training.optimizer.learn_rate]
+@schedules = "warmup_linear.v1"
+warmup_steps = 250
+total_steps = 20000
+initial_rate = 5e-5
+
+
+
+[training.batcher]
+@batchers = "spacy.batch_by_padded.v1"
+discard_oversize = true
+size = 2000
+buffer = 256
+
+[initialize]
+vectors = null
diff --git a/DeReKo/spacy_train/basic_config.cfg b/DeReKo/spacy_train/basic_config_newOrth.cfg
similarity index 100%
rename from DeReKo/spacy_train/basic_config.cfg
rename to DeReKo/spacy_train/basic_config_newOrth.cfg
diff --git a/DeReKo/spacy_train/config_allOrth.cfg b/DeReKo/spacy_train/config_allOrth.cfg
new file mode 100644
index 0000000..cf3b09b
--- /dev/null
+++ b/DeReKo/spacy_train/config_allOrth.cfg
@@ -0,0 +1,123 @@
+[paths]
+train = "/vol/netapp/daza/datasets/TIGER_conll/data_splits/train/Tiger.ALL.Orth.train.spacy"
+dev = "/vol/netapp/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.spacy"
+vectors = null
+init_tok2vec = null
+
+[system]
+gpu_allocator = "pytorch"
+seed = 0
+
+[nlp]
+lang = "de"
+pipeline = ["transformer","tagger"]
+tokenizer = {"@tokenizers":"spacy.Tokenizer.v1"}
+disabled = []
+before_creation = null
+after_creation = null
+after_pipeline_creation = null
+
+[components]
+
+[components.tagger]
+factory = "tagger"
+
+[components.tagger.model]
+@architectures = "spacy.Tagger.v1"
+nO = null
+
+[components.tagger.model.tok2vec]
+@architectures = "spacy-transformers.TransformerListener.v1"
+grad_factor = 1.0
+pooling = {"@layers":"reduce_mean.v1"}
+
+[components.transformer]
+factory = "transformer"
+max_batch_items = 4096
+set_extra_annotations = {"@annotation_setters":"spacy-transformers.null_annotation_setter.v1"}
+
+[components.transformer.model]
+@architectures = "spacy-transformers.TransformerModel.v1"
+name = "bert-base-german-cased"
+
+[components.transformer.model.get_spans]
+@span_getters = "spacy-transformers.strided_spans.v1"
+window = 128
+stride = 96
+
+[components.transformer.model.tokenizer_config]
+use_fast = true
+
+[corpora]
+
+[corpora.dev]
+@readers = "spacy.Corpus.v1"
+path = ${paths.dev}
+max_length = 0
+gold_preproc = false
+limit = 0
+augmenter = null
+
+[corpora.train]
+@readers = "spacy.Corpus.v1"
+path = ${paths.train}
+max_length = 500
+gold_preproc = false
+limit = 0
+augmenter = null
+
+[training]
+accumulate_gradient = 3
+dev_corpus = "corpora.dev"
+train_corpus = "corpora.train"
+seed = ${system.seed}
+gpu_allocator = ${system.gpu_allocator}
+dropout = 0.1
+patience = 1600
+max_epochs = 0
+max_steps = 20000
+eval_frequency = 200
+frozen_components = []
+before_to_disk = null
+
+[training.batcher]
+@batchers = "spacy.batch_by_padded.v1"
+discard_oversize = true
+size = 2000
+buffer = 256
+get_length = null
+
+[training.logger]
+@loggers = "spacy.ConsoleLogger.v1"
+progress_bar = false
+
+[training.optimizer]
+@optimizers = "Adam.v1"
+beta1 = 0.9
+beta2 = 0.999
+L2_is_weight_decay = true
+L2 = 0.01
+grad_clip = 1.0
+use_averages = false
+eps = 0.00000001
+
+[training.optimizer.learn_rate]
+@schedules = "warmup_linear.v1"
+warmup_steps = 250
+total_steps = 20000
+initial_rate = 0.00005
+
+[training.score_weights]
+tag_acc = 1.0
+
+[pretraining]
+
+[initialize]
+vectors = null
+init_tok2vec = ${paths.init_tok2vec}
+vocab_data = null
+lookups = null
+
+[initialize.components]
+
+[initialize.tokenizer]
\ No newline at end of file
diff --git a/DeReKo/spacy_train/config.cfg b/DeReKo/spacy_train/config_newOrth.cfg
similarity index 100%
rename from DeReKo/spacy_train/config.cfg
rename to DeReKo/spacy_train/config_newOrth.cfg
diff --git a/DeReKo/spacy_train/conll2spacy.py b/DeReKo/spacy_train/conll2spacy.py
index f0d0d4c..9be14ab 100644
--- a/DeReKo/spacy_train/conll2spacy.py
+++ b/DeReKo/spacy_train/conll2spacy.py
@@ -4,17 +4,32 @@
 
 if __name__ == "__main__":
 	"""
-		--- TIGER New Orthography ---
-			python DeReKo/spacy_train/conll2spacy.py --corpus_name TigerNew --gld_token_type CoNLLUP_Token \
-				-i /home/daza/datasets/TIGER_conll/data_splits/train/Tiger.NewOrth.train.conll \
-				-o DeReKo/spacy_train/Tiger.NewOrth.train.json \
-				-t DeReKo/spacy_train/Tiger.NewOrth.train.txt
-			
-			python DeReKo/spacy_train/conll2spacy.py --corpus_name TigerNew --gld_token_type CoNLLUP_Token \
+	
+	--- TIGER NEW Orthography ---
+		python DeReKo/spacy_train/conll2spacy.py --corpus_name TigerNew --gld_token_type CoNLLUP_Token \
+			-i /home/daza/datasets/TIGER_conll/data_splits/train/Tiger.NewOrth.train.conll \
+			-o DeReKo/spacy_train/Tiger.NewOrth.train.json \
+			-t DeReKo/spacy_train/Tiger.NewOrth.train.txt
+		
+		python DeReKo/spacy_train/conll2spacy.py --corpus_name TigerNew --gld_token_type CoNLLUP_Token \
 			-i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll \
 			-o DeReKo/spacy_train/Tiger.NewOrth.test.json \
 			-t DeReKo/spacy_train/Tiger.NewOrth.test.txt
 			
+	--- TIGER NEW + OLD Orthography ---
+		cat Tiger.OldOrth.train.conll Tiger.NewOrth.train.conll > Tiger.ALL.Orth.train.conll
+		cat Tiger.OldOrth.test.conll Tiger.NewOrth.test.conll > Tiger.ALL.Orth.test.conll
+	
+		python DeReKo/spacy_train/conll2spacy.py --corpus_name TigerALL --gld_token_type CoNLLUP_Token \
+			-i /home/daza/datasets/TIGER_conll/data_splits/train/Tiger.ALL.Orth.train.conll \
+			-o /home/daza/datasets/TIGER_conll/data_splits/train/Tiger.ALL.Orth.train.json \
+			-t /home/daza/datasets/TIGER_conll/data_splits/train/Tiger.ALL.Orth.train.txt
+		
+		python DeReKo/spacy_train/conll2spacy.py --corpus_name TigerALL --gld_token_type CoNLLUP_Token \
+			-i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.ALL.Orth.test.conll \
+			-o /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.ALL.Orth.test.json \
+			-t /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.ALL.Orth.test.txt
+			
 	"""
 	
 	parser = argparse.ArgumentParser()
diff --git a/DeReKo/spacy_train/custom_spacy_dereko.py b/DeReKo/spacy_train/custom_spacy_dereko.py
index a4674dd..4b35282 100644
--- a/DeReKo/spacy_train/custom_spacy_dereko.py
+++ b/DeReKo/spacy_train/custom_spacy_dereko.py
@@ -1,11 +1,11 @@
 import spacy
+ 
+ORIG_DEREKO_VECS = "/export/netapp/kupietz/embeddings/dereko-2020-ii-alpha.all.txt.gz"
 
-#ORIG_DEREKO_VECS = "/export/netapp/kupietz/embeddings/dereko-2020-ii-alpha.all.txt.gz"
+# Made with command (2.x): python -m spacy init-model de de_fastext_vectors --vectors-loc dereko_vectors/cc.de.300.vec.gz
+# Made with command (3.x): python -m spacy init vectors de dereko_vectors/dereko-2020-ii-alpha.all.txt dereko_vectors/ --name de_dereko_2020
+VECTORS  = "lib/dereko_vectors" 
 
-# Made with command: python -m spacy init-model de de_fastext_vectors --vectors-loc dereko_vectors/cc.de.300.vec.gz
-SPACY_FAST_DE = "../../lib/de_fastext_vectors" 
-
-VECTORS = SPACY_FAST_DE
 
 nlp_dereko = spacy.load(VECTORS)
 doc1 = nlp_dereko("`` Ross Perot wäre vielleicht ein prächtiger Diktator ''")
diff --git a/DeReKo/spacy_train/spacy_bin_corpora/Tiger.NewOrth.test.spacy b/DeReKo/spacy_train/spacy_bin_corpora/Tiger.NewOrth.test.spacy
deleted file mode 100644
index de11ccc..0000000
--- a/DeReKo/spacy_train/spacy_bin_corpora/Tiger.NewOrth.test.spacy
+++ /dev/null
Binary files differ
diff --git a/DeReKo/spacy_train/spacy_bin_corpora/Tiger.NewOrth.train.spacy b/DeReKo/spacy_train/spacy_bin_corpora/Tiger.NewOrth.train.spacy
deleted file mode 100644
index 857239d..0000000
--- a/DeReKo/spacy_train/spacy_bin_corpora/Tiger.NewOrth.train.spacy
+++ /dev/null
Binary files differ
diff --git a/my_utils/clean_dereko_vectors.py b/my_utils/clean_dereko_vectors.py
new file mode 100644
index 0000000..934ce27
--- /dev/null
+++ b/my_utils/clean_dereko_vectors.py
@@ -0,0 +1,14 @@
+import gzip
+
+ORIG_DEREKO_VECS = "/export/netapp/kupietz/embeddings/dereko-2020-ii-alpha.all.txt.gz"
+txt_vec = open("/home/daza/ids-projects/DeReKo/spacy_train/dereko-2020-ii-alpha.all.txt", "w")
+
+skept_vecs = 0
+with gzip.open(ORIG_DEREKO_VECS,'r') as f:        
+	for ix, line in enumerate(f):     
+		try:
+			txt_vec.write(line.decode("utf-8"))
+		except:
+			skept_vecs += 1
+
+print(skept_vecs)
\ No newline at end of file
diff --git a/my_utils/conll_to_tok.py b/my_utils/conll_to_tok.py
index d5656e8..2dbe2ed 100644
--- a/my_utils/conll_to_tok.py
+++ b/my_utils/conll_to_tok.py
@@ -12,11 +12,21 @@
 				-ss "</S>" \
 				--token_type CoNLL09_Token
 				
+			*** GERMAN UNIVERSAL DEPS TEST ***
+			
 			python my_utils/conll_to_tok.py \
 				-s /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu \
 				-ss "</S>" \
 				--token_type CoNLLUP_Token
 			
+			*** TIGER TEST NEW ORTH ***
+			
+			python my_utils/conll_to_tok.py \
+			-s /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll \
+			-ss "</S>" \
+			--token_type CoNLLUP_Token
+			
+		
 		For RNNTagger
 			python my_utils/conll_to_tok.py \
 				-s /home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09 \
@@ -34,7 +44,10 @@
 	parser.add_argument("-c", "--com_str", help="Skip line if it starts with this string (comment market)", default="# ")
 	args = parser.parse_args()
 	
-	output_file = open(f"{args.src_file}.tok","w")
+	if args.sent_sep == "":
+		output_file = open(f"{args.src_file}.tok","w")
+	else:
+		output_file = open(f"{args.src_file}.sep.tok","w")
 	
 	for conll_obj in read_conll_generator(args.src_file, token_class=get_token_type(args.token_type), comment_str=args.com_str):
 		for tok in conll_obj.tokens:
diff --git a/my_utils/make_new_orth_silver_lemmas.py b/my_utils/make_new_orth_silver_lemmas.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/my_utils/make_new_orth_silver_lemmas.py
diff --git a/my_utils/make_tiger_new_orth.py b/my_utils/make_tiger_new_orth.py
index 8886f74..7bb42f4 100644
--- a/my_utils/make_tiger_new_orth.py
+++ b/my_utils/make_tiger_new_orth.py
@@ -2,7 +2,7 @@
 from lib.CoNLL_Annotation import read_conll, CoNLL09_Token, TigerNew_Token 
 from collections import Counter
 
-ORIGINAL_TIGER = "/home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09"
+ORIGINAL_TIGER = "/home/daza/datasets/TIGER_conll/TIGER_original_data/tiger_release_aug07.corrected.16012013.conll09"
 NEW_ORTH = "/vol/work/kupietz/Tiger_2_2/data/german/tiger/train/german_tiger_new_orthography.csv"
 
 
@@ -62,17 +62,17 @@
 	print(f"{len(problematic_sents)}/{len(train_tiger)} ({len(problematic_sents)*100/len(train_tiger)}%) of sentences have change of orthography.")
 	print(f"{len(token_changes)}/{total_tokens} ({len(token_changes)*100/total_tokens}%) of tokens have change of orthography.")
 	# Save Files
-	tiger_path = "/home/daza/datasets/TIGER_conll/"
+	save_path = "/home/daza/datasets/TIGER_conll"
 	new_cases = Counter(token_changes).most_common()
 	case_mapping = get_confident_mapping(new_cases)
 	# Stats
-	fu.counter_to_file(new_cases, f"{tiger_path}/TigerTokensChangeOrth.train.tsv")
-	fu.dict_to_file(case_mapping, f"{tiger_path}/TigerOrthMapping.train.json")
-	fu.list_to_file(problematic_sents, f"{tiger_path}/NewOrthProblems_Indices.train.txt")
+	fu.counter_to_file(new_cases, f"{save_path}/TigerTokensChangeOrth.train.tsv")
+	fu.dict_to_file(case_mapping, f"{save_path}/TigerOrthMapping.train.json")
+	fu.list_to_file(problematic_sents, f"{save_path}/NewOrthProblems_Indices.train.txt")
 	# Train/Test Splits
 	old_train, new_train = zip(*train_tiger)
-	fu.write_conll_file(old_train, out_path=f"{tiger_path}/Tiger.OldOrth.train.conll")
-	fu.write_conll_file(new_train, out_path=f"{tiger_path}/Tiger.NewOrth.train.conll")
-	fu.write_conll_file(test_tiger, out_path=f"{tiger_path}/Tiger.OldOrth.test.conll")
+	fu.write_conll_file(old_train, out_path=f"{save_path}/Tiger.OldOrth.train.conll")
+	fu.write_conll_file(new_train, out_path=f"{save_path}/Tiger.NewOrth.train.conll")
+	fu.write_conll_file(test_tiger, out_path=f"{save_path}/Tiger.OldOrth.test.conll")
 	
 		
diff --git a/systems/Run_Tree-RNN_Taggers.txt b/systems/Run_Tree-RNN_Taggers.txt
index e0ea45f..98a9dd2 100644
--- a/systems/Run_Tree-RNN_Taggers.txt
+++ b/systems/Run_Tree-RNN_Taggers.txt
@@ -1,7 +1,17 @@
+# To Preprocess Files (conllu ---> .sep.tok ^ .tok)
+
+python my_utils/conll_to_tok.py \
+-s /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu \
+-ss "</S>" \
+--token_type CoNLLUP_Token
 
 # TreeTagger:
 
-time cmd/tree-tagger-german-notokenize /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu.sep.tok /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.treetagger.parsed.conll
+time cmd/tree-tagger-german-notokenize /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu.sep.tok > /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.treetagger.parsed.conll
+
+time cmd/tree-tagger-german-notokenize /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.OldOrth.test.conll.sep.tok > /home/daza/datasets/TIGER_conll/sys_outputs/Tiger.OldOrth.test.TreeTagger.conll
 	
 # RNN Tagger:
-time cmd/rnn-tagger-german-notokenize.sh /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu.tok > /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.RNNtagger.parsed.conll
\ No newline at end of file
+time cmd/rnn-tagger-german-notokenize.sh /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu.tok > /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.RNNtagger.parsed.conll
+
+time cmd/rnn-tagger-german-notokenize.sh /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll.tok > /home/daza/datasets/TIGER_conll/sys_outputs/Tiger.NewOrth.test.RNNTagger.conll
diff --git a/systems/evaluate.py b/systems/evaluate.py
index 11ffd51..839cd37 100644
--- a/systems/evaluate.py
+++ b/systems/evaluate.py
@@ -44,8 +44,8 @@
     match, err, symbol = 0, 0, []
     y_gld, y_pred, mistakes = [], [], []
     for i, gld_tok in enumerate(gld.tokens):
-        # sys_lemma = tree_tagger_fixes.get(sys.tokens[i].lemma, sys.tokens[i].lemma)  # Omit TreeTagger "errors" because of article lemma disagreement 
         sys_lemma = sys.tokens[i].lemma
+        # sys_lemma = tree_tagger_fixes.get(sys.tokens[i].lemma, sys.tokens[i].lemma)  # Omit TreeTagger "errors" because of article lemma disagreement 
         y_gld.append(gld_tok.pos_tag)
         y_pred.append(sys_lemma)
         if gld_tok.lemma == sys_lemma:
@@ -87,41 +87,57 @@
         
         ********** TIGER CORPUS ALL ************
         
-            python systems/evaluate.py -t Turku --corpus_name Tiger\
+            python systems/evaluate.py -t Turku --corpus_name Tiger --gld_token_type CoNLL09_Token \
                 --sys_file /home/daza/datasets/TIGER_conll/tiger_turku_parsed.conllu \
                 --gld_file /home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09
                 
-            python systems/evaluate.py -t SpaCy --corpus_name Tiger\
+            python systems/evaluate.py -t SpaCy --corpus_name Tiger --gld_token_type CoNLL09_Token \
                 --sys_file /home/daza/datasets/TIGER_conll/tiger_spacy_parsed.conllu \
                 --gld_file /home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09
                 
-            python systems/evaluate.py -t RNNTagger --corpus_name Tiger\
+            python systems/evaluate.py -t RNNTagger --corpus_name Tiger --gld_token_type CoNLL09_Token \
                 --sys_file /home/daza/datasets/TIGER_conll/tiger_all.parsed.RNNTagger.conll \
                 --gld_file /home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09
             
-            python systems/evaluate.py -t TreeTagger --corpus_name Tiger\
+            python systems/evaluate.py -t TreeTagger --corpus_name Tiger --gld_token_type CoNLL09_Token \
                 --sys_file /home/daza/datasets/TIGER_conll/tiger_all.parsed.TreeTagger.conll \
                 --gld_file /home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09
             
+        
+        ********** TIGER CORPUS TEST ************
+        
+        python systems/evaluate.py -t SpaCy --corpus_name TigerTestOld \
+        --sys_file /home/daza/datasets/TIGER_conll/tiger_spacy_parsed.test.conllu \
+        --gld_file /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.OldOrth.test.conll
+        
+        python systems/evaluate.py -t SpaCy --corpus_name TigerTestNew \
+        --sys_file /home/daza/datasets/TIGER_conll/Tiger.NewOrth.test.spacy_parsed.conllu \
+        --gld_file /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll
+        
+        
+        python systems/evaluate.py -t Turku --corpus_name TigerTestNew \
+        --sys_file /home/daza/datasets/TIGER_conll/sys_outputs/Tiger.NewOrth.test.turku_parsed.conllu \
+        --gld_file /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll
+        
         ********** UNIVERSAL DEPENDENCIES TEST-SET ************
 
-            python systems/evaluate.py -t Turku --gld_token_type CoNLLUP_Token --corpus_name DE_GSD\
+            python systems/evaluate.py -t Turku --corpus_name DE_GSD \
             --sys_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu.parsed.0.conllu \
             --gld_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu    
             
-            python systems/evaluate.py -t SpaCyGL --gld_token_type CoNLLUP_Token --corpus_name DE_GSD\
+            python systems/evaluate.py -t SpaCyGL --corpus_name DE_GSD \
                 --sys_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.parsed.germalemma.conllu \
                 --gld_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu    
             
-            python systems/evaluate.py -t SpaCy --gld_token_type CoNLLUP_Token --corpus_name DE_GSD\
+            python systems/evaluate.py -t SpaCy --corpus_name DE_GSD \
                 --sys_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.parsed.conllu \
                 --gld_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu 
                 
-            python systems/evaluate.py -t RNNTagger --gld_token_type CoNLLUP_Token --corpus_name DE_GSD\
+            python systems/evaluate.py -t RNNTagger --corpus_name DE_GSD \
                 --sys_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.RNNtagger.parsed.conll \
                 --gld_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu
             
-            python systems/evaluate.py -t TreeTagger --gld_token_type CoNLLUP_Token --corpus_name DE_GSD\
+            python systems/evaluate.py -t TreeTagger --corpus_name DE_GSD \
                 --sys_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.treetagger.parsed.conll \
                 --gld_file /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu
                 
@@ -133,9 +149,9 @@
     parser = argparse.ArgumentParser()
     parser.add_argument("-s", "--sys_file", help="System output in CoNLL-U Format", required=True)
     parser.add_argument("-g", "--gld_file", help="Gold Labels to evaluate in CoNLL-U Format", required=True)
-    parser.add_argument("-t", "--type_sys", help="Which system produced the outputs", default="system")
     parser.add_argument("-c", "--corpus_name", help="Corpus Name for Gold Labels", required=True)
-    parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLL09_Token")
+    parser.add_argument("-t", "--type_sys", help="Which system produced the outputs", default="system")
+    parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLLUP_Token")
     parser.add_argument("-cs", "--comment_str", help="CoNLL Format of comentaries inside the file", default="#")
     args = parser.parse_args()
     
diff --git a/systems/parse_spacy.py b/systems/parse_spacy.py
index 1a32b67..3c56233 100644
--- a/systems/parse_spacy.py
+++ b/systems/parse_spacy.py
@@ -51,26 +51,34 @@
 	"""
 		EXAMPLE:
 		--- TIGER Classic Orthography ---
-			python systems/parse_spacy.py --corpus_name Tiger \
+			python systems/parse_spacy.py --corpus_name Tiger --gld_token_type CoNLL09_Token \
 				-i /home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09 \
 				-o /home/daza/datasets/TIGER_conll/tiger_spacy_parsed.conllu \
 				-t /home/daza/datasets/TIGER_conll/tiger_all.txt
+			
+			python systems/parse_spacy.py --corpus_name TigerOld_test \
+			-i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.OldOrth.test.conll \
+			-o /home/daza/datasets/TIGER_conll/tiger_spacy_parsed.test.conllu
 		
 		--- TIGER New Orthography ---
-			python systems/parse_spacy.py --corpus_name TigerNew --gld_token_type CoNLLUP_Token \
+			python systems/parse_spacy.py --corpus_name TigerNew \
 				-i /home/daza/datasets/TIGER_conll/Tiger.NewOrth.train.conll \
 				-o /home/daza/datasets/TIGER_conll/Tiger.NewOrth.train.spacy_parsed.conllu \
 				-t /home/daza/datasets/TIGER_conll/Tiger.NewOrth.train.txt
+			
+			python systems/parse_spacy.py --corpus_name TigerNew_test \
+			-i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll \
+			-o /home/daza/datasets/TIGER_conll/Tiger.NewOrth.test.spacy_parsed.conllu
 		
 		--- German GSD Universal Deps ---
-			python systems/parse_spacy.py --corpus_name DE_GSD --gld_token_type CoNLLUP_Token \
+			python systems/parse_spacy.py --corpus_name DE_GSD \
 				-i /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu \
 				-o /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.parsed.germalemma.conllu \
 				-t /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.txt
 				
 			
 		--- Real Data TEST  ---
-		time python systems/parse_spacy.py --corpus_name DeReKo_a00 --gld_token_type CoNLLUP_Token --comment_str "#" \
+		time python systems/parse_spacy.py --corpus_name DeReKo_a00 --comment_str "#" \
 			-i /export/netapp/kupietz/N-GRAMM-STUDIE/conllu/a00.conllu.gz \
 			-o /export/netapp/kupietz/N-GRAMM-STUDIE/conllu/0_SpaCyParsed/a00.spacy.gl.conllu
 			
@@ -81,15 +89,16 @@
 	parser.add_argument("-n", "--corpus_name", help="Corpus Name", default="Corpus")
 	parser.add_argument("-o", "--output_file", help="File where the Predictions will be saved", required=True)
 	parser.add_argument("-t", "--text_file", help="Output Plain Text File", default=None)
-	parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLL09_Token")
+	parser.add_argument("-sm", "--spacy_model", help="Spacy model containing the pipeline to tag", default="de_core_news_lg")
+	parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLLUP_Token")
 	parser.add_argument("-ugl", "--use_germalemma", help="Use Germalemma lemmatizer on top of SpaCy", default="True")
 	parser.add_argument("-c", "--comment_str", help="CoNLL Format of comentaries inside the file", default="#")
 	args = parser.parse_args()
 	
 	file_has_next, chunk_ix = True, 0
-	CHUNK_SIZE = 100000
-	SPACY_BATCH = 10000
-	SPACY_PROC = 50
+	CHUNK_SIZE = 20000
+	SPACY_BATCH = 2000
+	SPACY_PROC = 10
 	
 	# =====================================================================================
 	#                    LOGGING INFO ...
@@ -103,7 +112,7 @@
 	# =====================================================================================
 	#                    POS TAG DOCUMENTS
 	# =====================================================================================
-	spacy_de = spacy.load("de_core_news_lg", disable=["ner", "parser"])
+	spacy_de = spacy.load(args.spacy_model, disable=["ner", "parser"])
 	spacy_de.tokenizer = WhitespaceTokenizer(spacy_de.vocab) # We won't re-tokenize to respect how the source CoNLL are tokenized!
 	write_out = open(args.output_file, "w")
 	lemmatizer = GermaLemma()
diff --git a/systems/parse_spacy3.py b/systems/parse_spacy3.py
new file mode 100644
index 0000000..14e5a9f
--- /dev/null
+++ b/systems/parse_spacy3.py
@@ -0,0 +1,123 @@
+import argparse, os
+import spacy
+from spacy.language import Language
+from spacy.tokens import Doc
+import logging, sys, time
+from lib.CoNLL_Annotation import get_token_type
+import my_utils.file_utils as fu
+from germalemma import GermaLemma
+
+
+@Language.factory("my_component")
+class WhitespaceTokenizer(object):
+	def __init__(self, nlp, name):
+		self.vocab = nlp.vocab
+
+	def __call__(self, text):
+		words = text.split(' ')
+		# All tokens 'own' a subsequent space character in this tokenizer
+		spaces = [True] * len(words)
+		return Doc(self.vocab, words=words, spaces=spaces)
+
+
+def get_conll_str(anno_obj, spacy_doc, use_germalemma):
+	#  First lines are comments. (metadata)
+	conll_lines = anno_obj.metadata # Then we want: [ID, FORM, LEMMA, UPOS, XPOS, FEATS, HEAD, DEPREL, DEPS, MISC]
+	for ix, token in enumerate(spacy_doc):
+		if use_germalemma == "True":
+			content = (str(ix), token.text, find_germalemma(token.text, token.tag_, token.lemma_), token.pos_, token.tag_, "_", "_", "_", "_", "_")
+		else:
+			content = (str(ix), token.text, token.lemma_, token.pos_, token.tag_, "_", "_", "_", "_", "_") # Pure SpaCy!
+		conll_lines.append("\t".join(content))
+	return "\n".join(conll_lines)
+
+
+# def freeling_lemma_lookup():
+# 	dicts_path = "/home/daza/Frameworks/FreeLing/data/de/dictionary/entries/"
+	
+def find_germalemma(word, pos, spacy_lemma):
+	simplify_pos = {"ADJA":"ADJ", "ADJD":"ADJ",
+					"NA":"N", "NE":"N", "NN":"N",
+					"ADV":"ADV", "PAV":"ADV", "PROAV":"ADV", "PAVREL":"ADV", "PWAV":"ADV", "PWAVREL":"ADV",
+					"VAFIN":"V", "VAIMP":"V", "VAINF":"V", "VAPP":"V", "VMFIN":"V", "VMINF":"V",
+					"VMPP":"V", "VVFIN":"V", "VVIMP":"V", "VVINF":"V", "VVIZU":"V","VVPP":"V"
+				}
+	# simplify_pos = {"VERB": "V", "ADV": "ADV", "ADJ": "ADJ", "NOUN":"N", "PROPN": "N"}
+	try:
+		return lemmatizer.find_lemma(word, simplify_pos.get(pos, "UNK"))
+	except:
+		return spacy_lemma
+
+
+if __name__ == "__main__":
+	"""
+		EXAMPLE:
+		--- TIGER Classic Orthography ---
+			python systems/parse_spacy3.py --corpus_name TigerTestNew \
+				-i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.NewOrth.test.conll \
+				-o /home/daza/datasets/TIGER_conll/tiger_spacy3_parsed.conllu
+	"""
+	
+	parser = argparse.ArgumentParser()
+	parser.add_argument("-i", "--input_file", help="Input Corpus", required=True)
+	parser.add_argument("-n", "--corpus_name", help="Corpus Name", default="Corpus")
+	parser.add_argument("-o", "--output_file", help="File where the Predictions will be saved", required=True)
+	parser.add_argument("-sm", "--spacy_model", help="Spacy model containing the pipeline to tag", default="de_core_news_sm")
+	parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLLUP_Token")
+	parser.add_argument("-ugl", "--use_germalemma", help="Use Germalemma lemmatizer on top of SpaCy", default="True")
+	parser.add_argument("-c", "--comment_str", help="CoNLL Format of comentaries inside the file", default="#")
+	args = parser.parse_args()
+	
+	file_has_next, chunk_ix = True, 0
+	CHUNK_SIZE = 1000
+	SPACY_BATCH = 100
+	SPACY_PROC = 4
+	
+	# =====================================================================================
+	#                    LOGGING INFO ...
+	# =====================================================================================
+	logger = logging.getLogger(__name__)
+	console_hdlr = logging.StreamHandler(sys.stdout)
+	file_hdlr = logging.FileHandler(filename=f"logs/Parse_{args.corpus_name}.SpaCy.log")
+	logging.basicConfig(level=logging.INFO, handlers=[console_hdlr, file_hdlr])
+	logger.info(f"Chunking {args.corpus_name} Corpus in chunks of {CHUNK_SIZE} Sentences")
+	
+	# =====================================================================================
+	#                    POS TAG DOCUMENTS
+	# =====================================================================================
+	
+	if os.path.exists(args.spacy_model):
+		pass # Load Custom Trained model
+	else:
+		# try:
+		spacy_de = spacy.load(args.spacy_model, disable=["ner", "parser"])
+		spacy_de.tokenizer = WhitespaceTokenizer(spacy_de, "keep_original_tokens") # We won't re-tokenize to respect how the source CoNLL are tokenized!
+		# except:
+		# 	print(f"Check if model {args.spacy_model} is a valid SpaCy Pipeline or if the Path containing the trained model exists!")
+		# 	exit()
+	
+	write_out = open(args.output_file, "w")
+	lemmatizer = GermaLemma()
+	
+	if ".gz" == args.input_file[-3:]:
+		in_file = fu.expand_file(args.input_file)
+	else:
+		in_file = args.input_file
+	
+	start = time.time()
+	total_processed_sents = 0
+	line_generator = fu.file_generator(in_file)
+	while file_has_next:
+		annos, file_has_next = fu.get_file_annos_chunk(line_generator, chunk_size=CHUNK_SIZE, 		token_class=get_token_type(args.gld_token_type), comment_str=args.comment_str)
+		if len(annos) == 0: break
+		total_processed_sents += len(annos)
+		logger.info(f"Already processed {total_processed_sents} sentences...")
+		sents = [a.get_sentence() for a in annos]
+		for ix, doc in enumerate(spacy_de.pipe(sents, batch_size=SPACY_BATCH, n_process=SPACY_PROC)):
+			conll_str = get_conll_str(annos[ix], doc, use_germalemma=args.use_germalemma)
+			write_out.write(conll_str)
+			write_out.write("\n\n")
+			
+	end = time.time()
+	logger.info(f"Processing {args.corpus_name} took {(end - start)} seconds!")
+			
\ No newline at end of file
diff --git a/systems/parse_turku.py b/systems/parse_turku.py
index 7694504..284dbd4 100644
--- a/systems/parse_turku.py
+++ b/systems/parse_turku.py
@@ -6,11 +6,6 @@
 import my_utils.file_utils as fu
 import argparse
 
-
-TIGER_CORPUS = "/home/daza/datasets/TIGER_conll/tiger_release_aug07.corrected.16012013.conll09"
-DE_GSD_CORPUS = "/home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu"
-
-
 if __name__ == "__main__":
     
     """
@@ -18,6 +13,9 @@
         
         python systems/parse_turku.py --corpus_name DE_GSD --gld_token_type CoNLLUP_Token \
             -i /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu
+        
+        python systems/parse_turku.py --corpus_name TigerTestOld --gld_token_type CoNLLUP_Token \
+        -i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.OldOrth.test.conll
             
     """