Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 1 | package main |
| 2 | |
| 3 | import ( |
Akron | 7e269d4 | 2021-08-12 23:18:05 +0200 | [diff] [blame] | 4 | "fmt" |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 5 | "os" |
| 6 | |
Akron | 527c10c | 2021-08-13 01:45:18 +0200 | [diff] [blame] | 7 | "log" |
| 8 | |
Akron | 7f1097f | 2021-09-21 16:00:29 +0200 | [diff] [blame] | 9 | datok "github.com/KorAP/datok" |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 10 | "github.com/alecthomas/kong" |
| 11 | ) |
| 12 | |
// cli declares the command-line interface parsed by kong.
// Two subcommands are exposed: "convert" (foma -> tokenizer file)
// and "tokenize" (run a tokenizer over stdin).
var cli struct {
	// Convert turns a compiled foma FST file into a serialized
	// tokenizer, either a Double Array or a Matrix representation.
	Convert struct {
		Foma        string `kong:"required,short='i',help='The Foma file'"`
		Tokenizer   string `kong:"required,short='o',help='The Tokenizer file'"`
		DoubleArray bool   `kong:"optional,short='d',help='Convert to Double Array instead of Matrix representation'"`
	} `kong:"cmd, help='Convert a foma file to a Matrix or Double Array tokenizer'"`
	// Tokenize reads text from stdin and writes tokens/sentence
	// boundaries to stdout, controlled by the negatable flags below.
	Tokenize struct {
		Tokenizer         string `kong:"required,short='t',help='The Matrix or Double Array Tokenizer file'"`
		Tokens            bool   `kong:"optional,negatable,default=true,help='Print token surfaces'"`
		Sentences         bool   `kong:"optional,negatable,default=true,help='Print sentence boundaries'"`
		TokenPositions    bool   `kong:"optional,negatable,default=false,short='p',help='Print token offsets'"`
		SentencePositions bool   `kong:"optional,negatable,default=false,help='Print sentence offsets'"`
		// NOTE(review): the help text ('Ignore newline after EOT') reads
		// inverted relative to the flag name; confirm intended wording
		// before changing the user-visible string.
		NewlineAfterEOT bool `kong:"optional,negatable,default=false,help='Ignore newline after EOT'"`
	} `kong:"cmd, help='Tokenize a text'"`
}
| 28 | |
| 29 | // Main method for command line handling |
| 30 | func main() { |
| 31 | |
| 32 | // Parse command line parameters |
| 33 | parser := kong.Must( |
| 34 | &cli, |
| 35 | kong.Name("datok"), |
Akron | 941f215 | 2021-09-26 15:14:25 +0200 | [diff] [blame] | 36 | kong.Description("FSA based tokenizer"), |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 37 | kong.UsageOnError(), |
| 38 | ) |
| 39 | |
Akron | 7e269d4 | 2021-08-12 23:18:05 +0200 | [diff] [blame] | 40 | ctx, err := parser.Parse(os.Args[1:]) |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 41 | |
| 42 | parser.FatalIfErrorf(err) |
| 43 | |
Akron | 7e269d4 | 2021-08-12 23:18:05 +0200 | [diff] [blame] | 44 | if ctx.Command() == "convert" { |
| 45 | tok := datok.LoadFomaFile(cli.Convert.Foma) |
| 46 | if tok == nil { |
Akron | 527c10c | 2021-08-13 01:45:18 +0200 | [diff] [blame] | 47 | log.Fatalln("Unable to load foma file") |
Akron | 7e269d4 | 2021-08-12 23:18:05 +0200 | [diff] [blame] | 48 | } |
Akron | 941f215 | 2021-09-26 15:14:25 +0200 | [diff] [blame] | 49 | if cli.Convert.DoubleArray { |
| 50 | dat := tok.ToDoubleArray() |
| 51 | _, err := dat.Save(cli.Convert.Tokenizer) |
| 52 | if err != nil { |
| 53 | log.Fatalln(err) |
| 54 | } |
| 55 | } else { |
| 56 | mat := tok.ToMatrix() |
| 57 | _, err := mat.Save(cli.Convert.Tokenizer) |
| 58 | if err != nil { |
| 59 | log.Fatalln(err) |
| 60 | } |
Akron | 7e269d4 | 2021-08-12 23:18:05 +0200 | [diff] [blame] | 61 | } |
| 62 | fmt.Println("File successfully converted.") |
| 63 | os.Exit(0) |
| 64 | } |
| 65 | |
Akron | 941f215 | 2021-09-26 15:14:25 +0200 | [diff] [blame] | 66 | // Load the Datok or Matrix file |
| 67 | dat := datok.LoadTokenizerFile(cli.Tokenize.Tokenizer) |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 68 | |
| 69 | // Unable to load the datok file |
| 70 | if dat == nil { |
Akron | 941f215 | 2021-09-26 15:14:25 +0200 | [diff] [blame] | 71 | log.Fatalln("Unable to load file") |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 72 | os.Exit(1) |
| 73 | } |
| 74 | |
Akron | 0f087ea | 2021-10-27 19:40:15 +0200 | [diff] [blame] | 75 | // Create flags parameter based on command line parameters |
| 76 | var flags datok.Bits |
| 77 | if cli.Tokenize.Tokens { |
| 78 | flags |= datok.TOKENS |
| 79 | } |
| 80 | |
| 81 | if cli.Tokenize.TokenPositions { |
| 82 | flags |= datok.TOKEN_POS |
| 83 | } |
| 84 | |
| 85 | if cli.Tokenize.Sentences { |
| 86 | flags |= datok.SENTENCES |
| 87 | } |
| 88 | |
| 89 | if cli.Tokenize.SentencePositions { |
| 90 | flags |= datok.SENTENCE_POS |
| 91 | } |
| 92 | |
| 93 | if cli.Tokenize.NewlineAfterEOT { |
| 94 | flags |= datok.NEWLINE_AFTER_EOT |
| 95 | } |
| 96 | |
Akron | 4f6b28c | 2021-10-25 00:52:03 +0200 | [diff] [blame] | 97 | // Create token writer based on the options defined |
Akron | 0f087ea | 2021-10-27 19:40:15 +0200 | [diff] [blame] | 98 | tw := datok.NewTokenWriterFromOptions(os.Stdout, flags) |
Akron | 4f6b28c | 2021-10-25 00:52:03 +0200 | [diff] [blame] | 99 | |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 100 | // Program is running in a pipe |
| 101 | fileInfo, _ := os.Stdin.Stat() |
| 102 | if fileInfo.Mode()&os.ModeCharDevice == 0 { |
Akron | 4f6b28c | 2021-10-25 00:52:03 +0200 | [diff] [blame] | 103 | dat.TransduceTokenWriter(os.Stdin, tw) |
| 104 | tw.Flush() |
Akron | 8e1d69b | 2021-08-12 17:38:49 +0200 | [diff] [blame] | 105 | } |
| 106 | } |