Make tokenizer robust and never-failing
Change-Id: I7f249434bc233b560c8d493f1f0c2abd4d69db91
diff --git a/token_writer.go b/token_writer.go
index ed580ef..f6dffa4 100644
--- a/token_writer.go
+++ b/token_writer.go
@@ -23,6 +23,7 @@
TextEnd func(int)
Flush func() error
Token func(int, []rune)
+ // Fail func(int)
}
// Create a new token writer based on the options
@@ -36,6 +37,8 @@
tw := &TokenWriter{}
+ // tw.Fail = func(_ int) {}
+
// Collect token positions and maybe tokens
if flags&(TOKEN_POS|SENTENCE_POS) != 0 {