Cleanup code
diff --git a/.gitignore b/.gitignore
index ecfe0bc..c9d1bec 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,3 @@
-sandbox
\ No newline at end of file
+sandbox
+tokenizer.code-workspace
+*.info
\ No newline at end of file
diff --git a/datokenizer.go b/datokenizer.go
index e1f55dc..e128d49 100644
--- a/datokenizer.go
+++ b/datokenizer.go
@@ -6,6 +6,12 @@
  * and written by Mans Hulden.
  */
 
+// TODO:
+// - replace maxSize with the check value
+// - Strip first state and make everything start with 0!
+// - Serialize!
+// - Split Tokenizer and DATokenizer
+
 import (
 	"bufio"
 	"compress/gzip"
@@ -15,6 +21,8 @@
 	"strconv"
 	"strings"
 	"unicode/utf8"
+
+	"github.com/rs/zerolog/log"
 )
 
 const (
@@ -23,6 +31,7 @@
 	STATES  = 3
 	NONE    = 4
 	NEWLINE = '\u000a'
+	DEBUG   = false
 )
 
 // Special symbols in sigma
@@ -37,49 +46,49 @@
 }
 
 type edge struct {
-	in     int
-	out    int
-	target int
+	inSym  int
+	outSym int
+	end    int
 }
 
 type Tokenizer struct {
 	sigma       map[rune]int
-	sigma_rev   map[int]rune
-	arccount    int
-	statecount  int
-	sigmacount  int
-	maxsize     int
+	sigmaRev    map[int]rune
+	arcCount    int
+	stateCount  int
+	sigmaCount  int
+	maxSize     int
 	array       []int
 	transitions []map[int]*edge
 }
 
-func parse_file(file string) *Tokenizer {
+func ParseFile(file string) *Tokenizer {
 	f, err := os.Open(file)
 	if err != nil {
-		panic(err)
+		log.Error().Err(err).Send()
+		os.Exit(1)
 	}
 	defer f.Close()
 
 	gz, err := gzip.NewReader(f)
 	if err != nil {
-		panic(err)
+		log.Error().Err(err).Send()
+		os.Exit(1)
 	}
 	defer gz.Close()
 
-	return parse(gz)
+	return Parse(gz)
 }
 
-func parse(ior io.Reader) *Tokenizer {
+func Parse(ior io.Reader) *Tokenizer {
 	r := bufio.NewReader(ior)
 
 	tok := &Tokenizer{
-		sigma:     make(map[rune]int),
-		sigma_rev: make(map[int]rune),
+		sigma:    make(map[rune]int),
+		sigmaRev: make(map[int]rune),
 	}
 
-	final := false
-
-	var arrstate, arrin, arrout, arrtarget, arrfinal int
+	var state, inSym, outSym, end, final int
 
 	mode := 0
 	var elem []string
@@ -91,7 +100,8 @@
 			if err == io.EOF {
 				break
 			}
-			panic(err)
+			log.Error().Err(err).Send()
+			os.Exit(1)
 		}
 		if strings.HasPrefix(line, "##foma-net") {
 			continue
@@ -105,8 +115,8 @@
 
 			// Adds a final transition symbol to sigma
 			// written as '#' in Mizobuchi et al (2000)
-			tok.sigmacount++
-			FINAL = tok.sigmacount
+			tok.sigmaCount++
+			FINAL = tok.sigmaCount
 			continue
 		}
 		if strings.HasPrefix(line, "##sigma##") {
@@ -138,26 +148,30 @@
 					fmt.Println("name:             " + elem[12])
 				*/
 				if elem[6] != "1" {
-					panic("The FST needs to be deterministic")
+					log.Error().Msg("The FST needs to be deterministic")
+					os.Exit(1)
 				}
 				if elem[9] != "1" {
-					panic("The FST needs to be epsilon free")
+					log.Error().Msg("The FST needs to be epsilon free")
+					os.Exit(1)
 				}
 
 				elemint[0], err = strconv.Atoi(elem[1])
 				if err != nil {
-					panic("Can't read arccount")
+					log.Error().Msg("Can't read arccount")
+					os.Exit(1)
 				}
-				tok.arccount = elemint[0]
+				tok.arcCount = elemint[0]
 
 				// States start at 1 in Mizobuchi et al (2000),
 				// as the state 0 is associated with a fail.
 				// Initialize states and transitions
 				elemint[0], err = strconv.Atoi(elem[2])
 				if err != nil {
-					panic("Can't read statecount")
+					log.Error().Msg("Can't read statecount")
+					os.Exit(1)
 				}
-				tok.statecount = elemint[0]
+				tok.stateCount = elemint[0]
 				tok.transitions = make([]map[int]*edge, elemint[0]+1)
 				continue
 			}
@@ -200,74 +214,72 @@
 				switch len(elem) {
 				case 5:
 					{
-						arrstate = elemint[0]
-						arrin = elemint[1]
-						arrout = elemint[2]
-						arrtarget = elemint[3]
-						arrfinal = elemint[4]
+						state = elemint[0]
+						inSym = elemint[1]
+						outSym = elemint[2]
+						end = elemint[3]
+						final = elemint[4]
 					}
 				case 4:
 					{
 						if elemint[1] == -1 {
-							arrstate = elemint[0]
-							arrfinal = elemint[3]
+							state = elemint[0]
+							final = elemint[3]
 						} else {
-							arrstate = elemint[0]
-							arrin = elemint[1]
-							arrtarget = elemint[2]
-							arrfinal = elemint[3]
-							arrout = arrin
+							state = elemint[0]
+							inSym = elemint[1]
+							end = elemint[2]
+							final = elemint[3]
+							outSym = inSym
 						}
 					}
 				case 3:
 					{
-						arrin = elemint[0]
-						arrout = elemint[1]
-						arrtarget = elemint[2]
+						inSym = elemint[0]
+						outSym = elemint[1]
+						end = elemint[2]
 					}
 				case 2:
 					{
-						arrin = elemint[0]
-						arrtarget = elemint[1]
-						arrout = arrin
+						inSym = elemint[0]
+						end = elemint[1]
+						outSym = inSym
 					}
 				}
 
-				// This collects all edges until arrstate changes
-				if arrfinal == 1 {
-					final = true
-				} else {
-					final = false
-				}
-
 				// While the states in foma start with 0, the states in the
 				// Mizobuchi FSA start with one - so we increase every state by 1.
 
-				/*
-					if arrin != arrout && arrin != EPSILON && tok.sigma_rev[arrin] != '\n' {
-						panic("Problem: " + strconv.Itoa(arrstate) + " -> " + strconv.Itoa(arrtarget) + " (" + strconv.Itoa(arrin) + ":" + strconv.Itoa(arrout) + ") ")
-					}
-				*/
-				if arrin != arrout {
-					if arrin == EPSILON && tok.sigma_rev[arrout] == NEWLINE {
-					} else if arrin != EPSILON && arrout == EPSILON {
-					} else {
-						panic(
-							"Problem: " +
-								strconv.Itoa(arrstate) +
-								" -> " + strconv.Itoa(arrtarget) +
+				if inSym != outSym {
+
+					// Allow any epsilon to become a newline
+					if !(inSym == EPSILON && tok.sigmaRev[outSym] == NEWLINE) &&
+
+						// Allow any non-epsilon symbol to map to epsilon (ignored)
+						!(inSym != EPSILON && outSym == EPSILON) &&
+
+						// Allow any symbol to become a new line
+						!(tok.sigmaRev[outSym] == NEWLINE) {
+
+						log.Error().Msg(
+							"Unsupported transition: " +
+								strconv.Itoa(state) +
+								" -> " + strconv.Itoa(end) +
 								" (" +
-								strconv.Itoa(arrin) +
+								strconv.Itoa(inSym) +
 								":" +
-								strconv.Itoa(arrout) +
+								strconv.Itoa(outSym) +
 								") (" +
-								string(tok.sigma_rev[arrin]) +
+								string(tok.sigmaRev[inSym]) +
 								":" +
-								string(tok.sigma_rev[arrout]) +
+								string(tok.sigmaRev[outSym]) +
 								")")
+						os.Exit(1)
 					}
 				}
 
+				// This collects all edges until the state changes
+
 				// TODO:
 				//   if arrin == EPSILON && arrout == TOKENEND, mark state as newline
 				//   if the next transition is the same, remove TOKENEND and add SENTENCEEND
@@ -277,35 +289,39 @@
 				//   if arrout == EPSILON, mark the transition as NOTOKEN
 
 				targetObj := &edge{
-					in:     arrin,
-					out:    arrout,
-					target: arrtarget + 1,
+					inSym:  inSym,
+					outSym: outSym,
+					end:    end + 1,
 				}
 
-				// Initialize outgoing state
-				if tok.transitions[arrstate+1] == nil {
-					tok.transitions[arrstate+1] = make(map[int]*edge)
+				// Initialize outgoing states
+				if tok.transitions[state+1] == nil {
+					tok.transitions[state+1] = make(map[int]*edge)
 				}
 
-				if arrin >= 0 {
-					tok.transitions[arrstate+1][arrin] = targetObj
+				// Ignore transitions with invalid symbols
+				if inSym >= 0 {
+					tok.transitions[state+1][inSym] = targetObj
 				}
 
-				if final {
-					tok.transitions[arrstate+1][FINAL] = &edge{}
+				// Add final transition
+				if final == 1 {
+					tok.transitions[state+1][FINAL] = &edge{}
 				}
 
-				fmt.Println("Add",
-					arrstate+1, "->", arrtarget+1,
-					"(",
-					arrin,
-					":",
-					arrout,
-					") (",
-					string(tok.sigma_rev[arrin]),
-					":",
-					string(tok.sigma_rev[arrout]),
-					")")
+				if DEBUG {
+					fmt.Println("Add",
+						state+1, "->", end+1,
+						"(",
+						inSym,
+						":",
+						outSym,
+						") (",
+						string(tok.sigmaRev[inSym]),
+						":",
+						string(tok.sigmaRev[outSym]),
+						")")
+				}
 
 				continue
 			}
@@ -317,10 +333,11 @@
 				number, err := strconv.Atoi(elem[0])
 
 				if err != nil {
-					panic(err)
+					log.Error().Err(err).Send()
+					os.Exit(1)
 				}
 
-				tok.sigmacount = number
+				tok.sigmaCount = number
 
 				var symbol rune
 
@@ -348,23 +365,27 @@
 							continue
 						}
 					default:
-						panic("MCS not supported: " + line)
+						{
+							log.Error().Msg("MCS not supported: " + line)
+							os.Exit(1)
+						}
 					}
 
-					// Probably a new line symbol
-				} else {
+				} else { // Probably a new line symbol
 					line, err = r.ReadString('\n')
 					if err != nil {
-						panic(err)
+						log.Error().Err(err).Send()
+						os.Exit(1)
 					}
 					if len(line) != 1 {
-						panic("MCS not supported:" + line)
+						log.Error().Msg("MCS not supported: " + line)
+						os.Exit(1)
 					}
-					symbol = rune('\n')
+					symbol = rune(NEWLINE)
 				}
 
 				tok.sigma[symbol] = number
-				tok.sigma_rev[number] = symbol
+				tok.sigmaRev[number] = symbol
 			}
 		}
 	}
@@ -373,110 +394,129 @@
 }
 
 // Implementation of Mizobuchi et al (2000), p.128
-func (tok *Tokenizer) buildDA() *Tokenizer {
+func (tok *Tokenizer) ToDoubleArray() *Tokenizer {
 
 	mark := 0
 	size := 0
 
 	// Create a mapping from s to t
-	table := make([]*mapping, tok.arccount+1)
+	table := make([]*mapping, tok.arcCount+1)
 
 	table[size] = &mapping{source: 1, target: 1}
 	size++
 
-	A := make([]int, 0, 256)
+	// Allocate space for the outgoing symbol range
+	A := make([]int, 0, tok.sigmaCount)
 
 	for mark < size {
 		s := table[mark].source // This is a state in Ms
 		t := table[mark].target // This is a state in Mt
 		mark++
-		//		fmt.Println("Increase mark", mark)
-		// St := append(St, t)
+
+		// Following the paper, here the state t can be remembered
+		// in the set of states St
 		A = A[:0]
 		tok.get_set(s, &A)
 
-		// fmt.Println("Outgoing arcs from t", t, A)
+		// Set base to the first free slot in the double array
+		tok.setBase(t, tok.xCheck(A))
 
-		// tok.array[t].base = tok.x_check(A)
-		tok.set_base(t, tok.x_check(A))
-
+		// Iterate over all outgoing symbols
 		for _, a := range A {
 
 			if a != FINAL {
-				s1 := tok.transitions[s][a].target // g(s, a)
 
-				// fmt.Println("Found", s, "to", s1, "via", a)
+				// Aka g(s, a)
+				s1 := tok.transitions[s][a].end
 
-				t1 := tok.get_base(t) + a
-				tok.set_check(t1, t)
+				// Store the transition
+				t1 := tok.getBase(t) + a
+				tok.setCheck(t1, t)
 
+				// Check for representative states
 				r := in_table(s1, table, size)
+
 				if r == 0 {
-					// fmt.Println("Increase size", t1)
+					// Remember the mapping
 					table[size] = &mapping{source: s1, target: t1}
 					size++
 				} else {
-					//fmt.Println("Rep is there", t1, r)
-					tok.set_base(t1, -1*r)
-					// tok.array[t1].base = -1 * r
+					// Overwrite with the representative state
+					tok.setBase(t1, -1*r)
 				}
 			} else {
-				fmt.Println("I set a final")
-				// t1 := tok.array[t].base + FINAL
-				t1 := tok.get_base(t) + FINAL
-				// tok.array[t1].check = t
-				tok.set_check(t1, t)
+				// Store a final transition
+				tok.setCheck(tok.getBase(t)+FINAL, t)
 			}
 		}
 	}
 
 	// Following Mizobuchi et al (2000) the size of the
 	// FSA should be stored in check(1).
-	tok.set_check(1, tok.maxsize+1)
-	tok.array = tok.array[:tok.maxsize+1]
+	tok.setCheck(1, tok.maxSize+1)
+	tok.array = tok.array[:tok.maxSize+1]
 	return tok
 }
 
+// Resize double array when necessary
 func (tok *Tokenizer) resize(l int) {
+	// TODO:
+	//   This is a bit too aggressive atm and should be calmed down.
 	if len(tok.array) <= l {
 		tok.array = append(tok.array, make([]int, l)...)
 	}
 }
 
-func (tok *Tokenizer) set_base(p int, v int) {
+// Set base value in double array
+func (tok *Tokenizer) setBase(p int, v int) {
 	l := p*2 + 1
 	tok.resize(l)
-	if tok.maxsize < l {
-		tok.maxsize = l
+	if tok.maxSize < l {
+		tok.maxSize = l
 	}
 	tok.array[p*2] = v
 }
 
-func (tok *Tokenizer) get_base(p int) int {
+// Get base value in double array
+func (tok *Tokenizer) getBase(p int) int {
 	if p*2 >= len(tok.array) {
 		return 0
 	}
 	return tok.array[p*2]
 }
 
-func (tok *Tokenizer) set_check(p int, v int) {
+// Set check value in double array
+func (tok *Tokenizer) setCheck(p int, v int) {
 	l := p*2 + 1
 	tok.resize(l)
-	if tok.maxsize < l {
-		tok.maxsize = l
+	if tok.maxSize < l {
+		tok.maxSize = l
 	}
 	tok.array[(p*2)+1] = v
 }
 
-func (tok *Tokenizer) get_check(p int) int {
+// Get check value in double array
+func (tok *Tokenizer) getCheck(p int) int {
 	if (p*2)+1 >= len(tok.array) {
 		return 0
 	}
 	return tok.array[(p*2)+1]
 }
 
+// Set size of double array
+func (tok *Tokenizer) setSize(p, v int) {
+	tok.setCheck(1, v)
+}
+
+// Get size of double array
+func (tok *Tokenizer) getSize(p int) int {
+	return tok.getCheck(1)
+}
+
 // Check the table if a mapping of s
-// exists and return this as a representative
+// exists and return this as a representative.
+// Currently iterates through the whole table
+// in a brute-force manner.
 func in_table(s int, table []*mapping, size int) int {
 	for x := 0; x < size; x++ {
 		if table[x].source == s {
@@ -499,114 +539,123 @@
 // structure until it finds a gap that fits all outgoing transitions
 // of the state. This is extremely slow, but is only necessary in the
 // construction phase of the tokenizer.
-func (tok *Tokenizer) x_check(symbols []int) int {
-	// see https://github.com/bramstein/datrie/blob/master/lib/trie.js
+func (tok *Tokenizer) xCheck(symbols []int) int {
+
+	// Start at the first entry of the double array list
 	base := 1
 
-	// 	fmt.Println("Resize", len(tok.linarray), "<", ((base + FINAL + 1) * 2))
-
 OVERLAP:
+
+	// Resize the array if necessary
 	tok.resize((base + FINAL) * 2)
 	for _, a := range symbols {
-		// if tok.array[base+a].check != 0 {
-		if tok.get_check(base+a) != 0 {
+		if tok.getCheck(base+a) != 0 {
 			base++
 			goto OVERLAP
 		}
 	}
-	//	fmt.Println("Found a nice place at", base, "for", len(symbols))
 	return base
 }
 
-// Based on Mizobuchi et al (2000), p. 129
-// Added support for IDENTITY, UNKNOWN and EPSILON
-func (tok *Tokenizer) match(input string) bool {
-	t := 1 // Start position
-	chars := []rune(input)
-	i := 0
+// Match an input string against the double array
+// FSA.
+//
+// Based on Mizobuchi et al (2000), p. 129,
+// with additional support for IDENTITY, UNKNOWN
+// and EPSILON transitions.
+func (tok *Tokenizer) Match(input string) bool {
 	var a int
 	var tu int
 	var ok bool
 
-	//	fmt.Println("Length of string is", len(chars))
+	t := 1 // Initial state
+	chars := []rune(input)
+	i := 0
+
 	for i < len(chars) {
 		a, ok = tok.sigma[chars[i]]
 
-		// Support identity symbol if char not in sigma
+		// Support identity symbol if character is not in sigma
 		if !ok && IDENTITY != -1 {
-			fmt.Println("IDENTITY symbol", string(chars[i]), "->", IDENTITY)
+			if DEBUG {
+				fmt.Println("IDENTITY symbol", string(chars[i]), "->", IDENTITY)
+			}
 			a = IDENTITY
-		} else {
+		} else if DEBUG {
 			fmt.Println("Sigma transition is okay for [", string(chars[i]), "]")
 		}
 		tu = t
 	CHECK:
-		t = tok.get_base(tu) + a
-		if t > tok.get_check(1) || tok.get_check(t) != tu {
-			fmt.Println("Match is not fine!", t, "and", tok.get_check(t), "vs", tu)
+		t = tok.getBase(tu) + a
 
-			// Try again with unknown symbol, in case identity failed
-			if !ok {
-				if a == IDENTITY {
-					fmt.Println("UNKNOWN symbol", string(chars[i]), "->", UNKNOWN)
-					a = UNKNOWN
-					goto CHECK
-				} else if a == UNKNOWN {
-					fmt.Println("aEPSILON symbol", string(chars[i]), "->", EPSILON)
-					a = EPSILON
-					// In the worst case, this checks epsilon twice at the same state -
-					// here and at the end
-					goto CHECK
-				}
-			} else if a != EPSILON {
-				fmt.Println("bEPSILON symbol", string(chars[i]), "->", EPSILON)
-				a = EPSILON
-				// In the worst case, this checks epsilon twice at the same state -
-				// here and at the end
-				goto CHECK
+		// Check if the transition is valid according to the double array
+		if t > tok.getCheck(1) || tok.getCheck(t) != tu {
+
+			if DEBUG {
+				fmt.Println("Match is not fine!", t, "and", tok.getCheck(t), "vs", tu)
 			}
-			break
-		} else if tok.get_base(t) < 0 {
+
+			if !ok && a == IDENTITY {
+				// Try again with unknown symbol, in case identity failed
+				if DEBUG {
+					fmt.Println("UNKNOWN symbol", string(chars[i]), "->", UNKNOWN)
+				}
+				a = UNKNOWN
+
+			} else if a != EPSILON {
+				// Try again with epsilon symbol, in case everything else failed
+				if DEBUG {
+					fmt.Println("EPSILON symbol", string(chars[i]), "->", EPSILON)
+				}
+				a = EPSILON
+			} else {
+				break
+			}
+			goto CHECK
+		} else if tok.getBase(t) < 0 {
 			// Move to representative state
-			t = -1 * tok.get_base(t)
+			t = -1 * tok.getBase(t)
 		}
+
+		// Transition is fine
 		if a != EPSILON {
+			// Character consumed
 			i++
 		}
+		// TODO:
+		//   Prevent endless epsilon loops!
 	}
 
-	if i == len(chars) {
-		fmt.Println("At the end")
-	} else {
-		fmt.Println("Not at the end")
+	if i != len(chars) {
+		if DEBUG {
+			fmt.Println("Not at the end")
+		}
 		return false
 	}
 
-	// fmt.Println("Hmm...", tok.get_check(tok.get_base(t)+FINAL), "-", t)
-
 FINALCHECK:
-	if tok.get_check(tok.get_base(t)+FINAL) == t {
+
+	// Automaton is in a final state
+	if tok.getCheck(tok.getBase(t)+FINAL) == t {
 		return true
 	}
 
+	// Check epsilon transitions until a final state is reached
 	tu = t
 	a = EPSILON
+	t = tok.getBase(tu) + a
 
-	t = tok.get_base(tu) + a
-	if t > tok.get_check(1) || tok.get_check(t) != tu {
-		fmt.Println("xMatch is not fine!", t, "and", tok.get_check(t), "vs", tu)
+	// Epsilon transition failed
+	if t > tok.getCheck(1) || tok.getCheck(t) != tu {
+		if DEBUG {
+			fmt.Println("Match is not fine!", t, "and", tok.getCheck(t), "vs", tu)
+		}
 		return false
-	} else if tok.get_base(t) < 0 {
+
+	} else if tok.getBase(t) < 0 {
 		// Move to representative state
-		t = -1 * tok.get_base(t)
-		goto FINALCHECK
+		t = -1 * tok.getBase(t)
 	}
+
 	goto FINALCHECK
 }
-
-// In the final realization, the states can only have 30 bits:
-// base[1] -> is final
-// base[2] -> is_separate
-// check[1] -> translates to epsilon
-// check[2] -> appends newine (Maybe)
-// If check[1] && check[2] is set, this translates to a sentence split (Maybe)
diff --git a/datokenizer_test.go b/datokenizer_test.go
index 7651c86..4ba42b3 100644
--- a/datokenizer_test.go
+++ b/datokenizer_test.go
@@ -10,32 +10,43 @@
 	assert := assert.New(t)
 
 	// bau | bauamt
-	tok := parse_file("testdata/bauamt.fst")
-	tok.buildDA()
-	assert.True(tok.match("bau"))
-	assert.True(tok.match("bauamt"))
-	assert.False(tok.match("baum"))
+	tok := ParseFile("testdata/bauamt.fst")
+	tok.ToDoubleArray()
+	assert.True(tok.Match("bau"))
+	assert.True(tok.Match("bauamt"))
+	assert.False(tok.Match("baum"))
 }
 
 func TestSimpleBranches(t *testing.T) {
 	assert := assert.New(t)
 
 	// (bau | wahl) (amt | en)
-	tok := parse_file("testdata/wahlamt.fst")
-	tok.buildDA()
-	assert.False(tok.match("bau"))
-	assert.True(tok.match("bauamt"))
-	assert.True(tok.match("wahlamt"))
-	assert.True(tok.match("bauen"))
-	assert.True(tok.match("wahlen"))
-	assert.False(tok.match("baum"))
+	tok := ParseFile("testdata/wahlamt.fst")
+	tok.ToDoubleArray()
+	assert.False(tok.Match("bau"))
+	assert.True(tok.Match("bauamt"))
+	assert.True(tok.Match("wahlamt"))
+	assert.True(tok.Match("bauen"))
+	assert.True(tok.Match("wahlen"))
+	assert.False(tok.Match("baum"))
 }
 
 func TestSimpleTokenizer(t *testing.T) {
 	assert := assert.New(t)
-	tok := parse_file("testdata/simpletok.fst")
-	tok.buildDA()
-	assert.True(tok.match("bau"))
-	assert.True(tok.match("bad"))
-	assert.True(tok.match("wald gehen"))
+	tok := ParseFile("testdata/simpletok.fst")
+	tok.ToDoubleArray()
+	assert.True(tok.Match("bau"))
+	assert.True(tok.Match("bad"))
+	assert.True(tok.Match("wald gehen"))
 }
+
+/*
+func TestFullTokenizer(t *testing.T) {
+	assert := assert.New(t)
+	tok := ParseFile("testdata/tokenizer.fst")
+	tok.ToDoubleArray()
+	assert.True(tok.Match("bau"))
+	assert.True(tok.Match("bad"))
+	assert.True(tok.Match("wald gehen"))
+}
+*/
diff --git a/go.mod b/go.mod
index 53dc7d0..9d677ca 100644
--- a/go.mod
+++ b/go.mod
@@ -2,4 +2,7 @@
 
 go 1.16
 
-require github.com/stretchr/testify v1.7.0
+require (
+	github.com/rs/zerolog v1.23.0 // indirect
+	github.com/stretchr/testify v1.7.0
+)
diff --git a/go.sum b/go.sum
index acb88a4..e4d7005 100644
--- a/go.sum
+++ b/go.sum
@@ -1,10 +1,38 @@
+github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
 github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
+github.com/rs/zerolog v1.23.0 h1:UskrK+saS9P9Y789yNNulYKdARjPZuS35B8gJF2x60g=
+github.com/rs/zerolog v1.23.0/go.mod h1:6c7hFfxPOy7TacJc4Fcdi24/J0NKYGzjG8FWRI916Qo=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=