Fix buffer flushing to work with tei2korapxml

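When datok runs inside a pipeline (e.g. driven by tei2korapxml), STDOUT and a
piped STDIN need to be closed explicitly on exit so that pending output is
flushed and the other end of the pipe sees EOF. A minimal sketch of the
pattern, assuming a buffered writer over STDOUT (the bufio.Writer and the
ModeCharDevice check below are illustrative, not datok's actual internals):

    package main

    import (
        "bufio"
        "fmt"
        "os"
    )

    func main() {
        // Close STDOUT on exit so a downstream reader sees EOF.
        defer os.Stdout.Close()

        // Buffered writer over STDOUT; flush it before STDOUT is
        // closed (deferred calls run in LIFO order).
        w := bufio.NewWriter(os.Stdout)
        defer w.Flush()

        // Read from STDIN only if it is a pipe or a file,
        // not an interactive terminal.
        fi, _ := os.Stdin.Stat()
        if fi.Mode()&os.ModeCharDevice == 0 {
            defer os.Stdin.Close()
            // ... tokenize os.Stdin and write to w ...
            fmt.Fprintln(w, "<token stream>")
        }
    }
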
Change-Id: I54eef64bea40f4ebb528e7f9870b9277000a4c13
diff --git a/cmd/datok.go b/cmd/datok.go
index e2a4efb..1c9d99b 100644
--- a/cmd/datok.go
+++ b/cmd/datok.go
@@ -16,7 +16,7 @@
 		Foma        string `kong:"required,short='i',help='The Foma FST file'"`
 		Tokenizer   string `kong:"required,short='o',help='The Tokenizer file'"`
 		DoubleArray bool   `kong:"optional,short='d',help='Convert to Double Array instead of Matrix representation'"`
-	} `kong:"cmd, help='Convert a foma file to a Matrix or Double Array tokenizer'"`
+	} `kong:"cmd, help='Convert a compiled foma FST file to a Matrix or Double Array tokenizer'"`
 	Tokenize struct {
 		Tokenizer         string `kong:"required,short='t',help='The Matrix or Double Array Tokenizer file'"`
 		Input             string `kong:"required,arg='',type='existingfile',help='Input file to tokenize (use - for STDIN)'"`
@@ -98,6 +98,7 @@
 
 	// Create token writer based on the options defined
 	tw := datok.NewTokenWriter(os.Stdout, flags)
+	defer os.Stdout.Close()
 
 	var r io.Reader
 
@@ -106,6 +107,7 @@
 		fileInfo, _ := os.Stdin.Stat()
 		if fileInfo.Mode()&os.ModeCharDevice == 0 {
 			r = os.Stdin
+			defer os.Stdin.Close()
 		} else {
 			log.Fatalln("Unable to read from STDIN")
 			os.Exit(1)