Introduce token_writer object

This change also removes final-state
sensitivity from the tokenizer. Tokens
now require a tokenend transition to
be treated as complete.
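
For context, here is a minimal sketch of what TokenWriterSimple might
look like, matching the API the test below exercises. The constructor
and the Token, SentenceEnd, and Flush methods are taken from the test;
the bufio-based implementation is an assumption, not the actual
token_writer.go.

    // Sketch only: an assumed implementation, kept consistent with
    // the test's expected output ("abc\ndef\n\n").
    package datok

    import (
    	"bufio"
    	"io"
    )

    // TokenWriterSimple writes one token per line and marks a
    // sentence boundary with an additional empty line.
    type TokenWriterSimple struct {
    	writer *bufio.Writer
    }

    func NewTokenWriterSimple(w io.Writer) *TokenWriterSimple {
    	return &TokenWriterSimple{bufio.NewWriter(w)}
    }

    // Token writes buffer[offset:] as a single line.
    func (tw *TokenWriterSimple) Token(offset int, buffer []rune) {
    	tw.writer.WriteString(string(buffer[offset:]))
    	tw.writer.WriteByte('\n')
    }

    // SentenceEnd emits an empty line after a sentence.
    func (tw *TokenWriterSimple) SentenceEnd() {
    	tw.writer.WriteByte('\n')
    }

    // Flush forwards buffered output to the underlying writer.
    func (tw *TokenWriterSimple) Flush() error {
    	return tw.writer.Flush()
    }

Buffering the writes behind an explicit Flush keeps per-token output
cheap while still letting callers force the data out, as the test does
before comparing the buffer contents.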
diff --git a/token_writer_test.go b/token_writer_test.go
new file mode 100644
index 0000000..9678157
--- /dev/null
+++ b/token_writer_test.go
@@ -0,0 +1,32 @@
+package datok
+
+import (
+	"bytes"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestTokenWriterSimple(t *testing.T) {
+	assert := assert.New(t)
+
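+	// Collect the writer's output in an in-memory buffer.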
+	b := make([]byte, 0, 2048)
+	w := bytes.NewBuffer(b)
+
+	tws := NewTokenWriterSimple(w)
+
+	assert.NotNil(tws)
+
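+	// Emit two tokens followed by a sentence boundary.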
+	tws.Token(0, []rune{'a', 'b', 'c'})
+
+	tws.Token(0, []rune{'d', 'e', 'f'})
+
+	tws.SentenceEnd()
+
+	tws.Flush()
+
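+	// Tokens appear one per line; the sentence end adds a blank line.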
+	assert.Equal("abc\ndef\n\n", w.String())
+}