Support offsets in token writer

Change-Id: I1a75f8b8b3e00a0d469580cb1abee72153bbe362
diff --git a/token_writer_test.go b/token_writer_test.go
index 84a4074..8ab6ed0 100644
--- a/token_writer_test.go
+++ b/token_writer_test.go
@@ -19,7 +19,7 @@
 
 	tws.Token(0, []rune{'a', 'b', 'c'})
 
-	tws.Token(0, []rune{'d', 'e', 'f'})
+	tws.Token(1, []rune{'d', 'e', 'f'})
 
 	tws.SentenceEnd(0)
 
@@ -27,5 +27,5 @@
 
 	tws.Flush()
 
-	assert.Equal("abc\ndef\n\n\n", w.String())
+	assert.Equal("abc\nef\n\n\n", w.String())
 }
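
Note on the offset semantics this test exercises: the updated assertion implies that Token(offset, token) now skips the first offset runes of the token before writing, which is why Token(1, []rune{'d', 'e', 'f'}) contributes "ef" rather than "def" to the output. The sketch below illustrates that reading only; the type name, the strings.Builder backing, and the SentenceEnd behaviour are illustrative assumptions, not the package's actual API, and the extra trailing newlines in the test's expected string come from calls outside the hunks shown above.

package main

import (
	"fmt"
	"strings"
)

// tokenWriterSentences is a hypothetical sketch of a token writer that
// honours a per-token rune offset: the first offset runes of each token
// are assumed to have been emitted already and are skipped.
type tokenWriterSentences struct {
	b strings.Builder
}

// Token writes token[offset:] followed by a newline.
func (t *tokenWriterSentences) Token(offset int, token []rune) {
	if offset > len(token) {
		offset = len(token)
	}
	t.b.WriteString(string(token[offset:]))
	t.b.WriteString("\n")
}

// SentenceEnd marks the end of a sentence with a blank line
// (the offset argument is ignored in this sketch).
func (t *tokenWriterSentences) SentenceEnd(offset int) {
	t.b.WriteString("\n")
}

func main() {
	var tws tokenWriterSentences
	tws.Token(0, []rune{'a', 'b', 'c'}) // writes "abc\n"
	tws.Token(1, []rune{'d', 'e', 'f'}) // offset 1 skips 'd', writes "ef\n"
	tws.SentenceEnd(0)                  // writes "\n"
	fmt.Printf("%q\n", tws.b.String())  // prints "abc\nef\n\n"
}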