package main

import (
    "fmt"
    "io"
    "log"
    "os"

    datok "github.com/KorAP/datok"
    "github.com/alecthomas/kong"
)

// TODO:
//   - Support version information based on
//     https://blog.carlmjohnson.net/post/2021/golang-118-minor-features/
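// A possible sketch for the TODO above (an assumption, not part of the
// current code): read the module version from the embedded build info
// and print it, e.g.
//
//     if info, ok := debug.ReadBuildInfo(); ok { // import "runtime/debug"
//         fmt.Println(info.Main.Version)
//     }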
var cli struct {
    Convert struct {
        Foma        string `kong:"required,short='i',help='The Foma FST file'"`
        Tokenizer   string `kong:"required,short='o',help='The Tokenizer file'"`
        DoubleArray bool   `kong:"optional,short='d',help='Convert to Double Array instead of Matrix representation'"`
    } `kong:"cmd, help='Convert a compiled foma FST file to a Matrix or Double Array tokenizer'"`
    Tokenize struct {
        Tokenizer         string `kong:"required,short='t',help='The Matrix or Double Array Tokenizer file'"`
        Input             string `kong:"required,arg='',type='existingfile',help='Input file to tokenize (use - for STDIN)'"`
        Tokens            bool   `kong:"optional,negatable,default=true,help='Print token surfaces (defaults to ${default})'"`
        Sentences         bool   `kong:"optional,negatable,default=true,help='Print sentence boundaries (defaults to ${default})'"`
        TokenPositions    bool   `kong:"optional,default=false,short='p',help='Print token offsets (defaults to ${default})'"`
        SentencePositions bool   `kong:"optional,default=false,help='Print sentence offsets (defaults to ${default})'"`
        NewlineAfterEOT   bool   `kong:"optional,default=false,help='Ignore newline after EOT (defaults to ${default})'"`
    } `kong:"cmd, help='Tokenize a text'"`
}
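// Example invocations (illustrative; the file names are placeholders,
// while the subcommands and flags follow the kong definitions above):
//
//     datok convert -i tokenizer.fst -o tokenizer.matok
//     datok convert -i tokenizer.fst -o tokenizer.datok -d
//     echo "Das ist ein Satz. Noch einer." | datok tokenize -t tokenizer.matok -
//     datok tokenize -t tokenizer.matok -p input.txt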
// Main method for command line handling
func main() {

    // Parse command line parameters
    parser := kong.Must(
        &cli,
        kong.Name("datok"),
        kong.Description("FSA-based tokenizer"),
        kong.UsageOnError(),
    )

    ctx, err := parser.Parse(os.Args[1:])

    parser.FatalIfErrorf(err)
    if ctx.Command() == "convert" {
        tok := datok.LoadFomaFile(cli.Convert.Foma)
        if tok == nil {
            log.Fatalln("Unable to load foma file")
        }
        if cli.Convert.DoubleArray {
            dat := tok.ToDoubleArray()
            _, err := dat.Save(cli.Convert.Tokenizer)
            if err != nil {
                log.Fatalln(err)
            }
        } else {
            mat := tok.ToMatrix()
            _, err := mat.Save(cli.Convert.Tokenizer)
            if err != nil {
                log.Fatalln(err)
            }
        }
        fmt.Println("File successfully converted.")
        os.Exit(0)
    }
    // Load the Double Array or Matrix tokenizer file
    dat := datok.LoadTokenizerFile(cli.Tokenize.Tokenizer)

    // Abort if the tokenizer file could not be loaded
    // (log.Fatalln already exits with status 1)
    if dat == nil {
        log.Fatalln("Unable to load tokenizer file")
    }

    // Create flags parameter based on command line parameters
    var flags datok.Bits
    if cli.Tokenize.Tokens {
        flags |= datok.TOKENS
    }

    if cli.Tokenize.TokenPositions {
        flags |= datok.TOKEN_POS
    }

    if cli.Tokenize.Sentences {
        flags |= datok.SENTENCES
    }

    if cli.Tokenize.SentencePositions {
        flags |= datok.SENTENCE_POS
    }

    if cli.Tokenize.NewlineAfterEOT {
        flags |= datok.NEWLINE_AFTER_EOT
    }
    // Create token writer based on the options defined
    tw := datok.NewTokenWriter(os.Stdout, flags)
    defer os.Stdout.Close()

    var r io.Reader

    // Read from STDIN if the input argument is "-", otherwise from the input file
    if cli.Tokenize.Input == "-" {
        fileInfo, _ := os.Stdin.Stat()

        // Only accept STDIN when it is a pipe or a redirected file,
        // not an interactive terminal
        if fileInfo.Mode()&os.ModeCharDevice == 0 {
            r = os.Stdin
            defer os.Stdin.Close()
        } else {
            log.Fatalln("Unable to read from STDIN")
        }
    } else {
        f, err := os.Open(cli.Tokenize.Input)
        if err != nil {
            log.Fatalln(err)
        }
        defer f.Close()
        r = f
    }

    dat.TransduceTokenWriter(r, tw)
    tw.Flush()
}