Optimize tests by avoiding reloading tokenizers
Change-Id: Ia70dab59fc3cfe5e47a1540724336214addb824e
diff --git a/token_writer.go b/token_writer.go
index 11179d3..bccb1bd 100644
--- a/token_writer.go
+++ b/token_writer.go
@@ -89,7 +89,7 @@
// Collect sentence positions and maybe sentence boundaries
if flags&SENTENCE_POS != 0 {
- tw.SentenceEnd = func(offset int) {
+ tw.SentenceEnd = func(_ int) {
// Add end position of last token to sentence boundary
// TODO: This only works if token positions are taking into account