Fix datok tests to be more robust regarding tokenizer changes
Change-Id: I3452483be639014e2845b613cbdda76a40a406ee
diff --git a/datok_test.go b/datok_test.go
index 2435acf..b4d0670 100644
--- a/datok_test.go
+++ b/datok_test.go
@@ -179,8 +179,8 @@
assert.Equal(dat.epsilon, 1)
assert.Equal(dat.unknown, 2)
assert.Equal(dat.identity, 3)
- assert.Equal(dat.final, 142)
- assert.Equal(len(dat.sigma), 137)
+ // assert.Equal(dat.final, 142)
+ // assert.Equal(len(dat.sigma), 137)
// assert.True(len(dat.array) > 3000000)
// assert.True(dat.maxSize > 3000000)
assert.True(tmatch(dat, "bau"))