daza | e3bc92e | 2020-11-04 11:06:26 +0100 | [diff] [blame] | 1 | # TODO: write a client to make multiple requests to the server! |
| 2 | import subprocess, json, time |
| 3 | import requests, glob, logging |
| 4 | import os.path, sys |
| 5 | from lib.CoNLL_Annotation import get_token_type |
| 6 | import my_utils.file_utils as fu |
| 7 | import argparse |
| 8 | |
def main() -> None:
    """Chunk a CoNLL corpus and push each chunk through the Turku parser.

    Reads the input corpus lazily via ``fu.file_generator``, cuts it into
    chunks of ``CHUNK_SIZE`` sentences with ``fu.get_file_chunk`` and hands
    each non-empty chunk to ``fu.turku_parse_file``. Progress is logged to
    stdout and to ``logs/Parse_<corpus_name>_Turku.log``.

    EXECUTE:

        python systems/parse_turku.py --corpus_name DE_GSD --gld_token_type CoNLLUP_Token \\
            -i /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu

        python systems/parse_turku.py --corpus_name TigerTestOld --gld_token_type CoNLLUP_Token \\
            -i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.OldOrth.test.conll
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input_file", help="Input Corpus", required=True)
    parser.add_argument("-n", "--corpus_name", help="Corpus Name", default="Corpus")
    parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLL09_Token")
    parser.add_argument("-c", "--comment_str", help="CoNLL Format of comentaries inside the file", default="#")
    args = parser.parse_args()

    file_has_next, chunk_ix = True, 0
    CHUNK_SIZE = 10000

    # =====================================================================================
    # LOGGING INFO ...
    # =====================================================================================
    # Fix: logging.FileHandler raises FileNotFoundError when the logs/ directory
    # does not exist yet ('os' is bound by the file's 'import os.path').
    os.makedirs("logs", exist_ok=True)
    logger = logging.getLogger(__name__)
    console_hdlr = logging.StreamHandler(sys.stdout)
    file_hdlr = logging.FileHandler(filename=f"logs/Parse_{args.corpus_name}_Turku.log")
    logging.basicConfig(level=logging.INFO, handlers=[console_hdlr, file_hdlr])
    logger.info(f"Chunking TIGER Corpus in chunks of {CHUNK_SIZE} Sentences")

    # =====================================================================================
    # PROCESS (PARSE) TIGER Corpus ...
    # =====================================================================================
    start = time.time()
    total_processed_sents = 0
    line_generator = fu.file_generator(args.input_file)
    while file_has_next:
        raw_text, file_has_next, n_sents = fu.get_file_chunk(
            line_generator,
            chunk_size=CHUNK_SIZE,
            token_class=get_token_type(args.gld_token_type),
            comment_str=args.comment_str,
        )
        total_processed_sents += n_sents
        if len(raw_text) > 0:
            fu.turku_parse_file(raw_text, args.input_file, chunk_ix)
            now = time.time()
            elapsed = now - start
            # Fix: guard against ZeroDivisionError when the first chunk
            # finishes within clock resolution.
            rate = total_processed_sents / elapsed if elapsed > 0 else float("inf")
            logger.info(f"Time Elapsed: {elapsed}. Processed {total_processed_sents}. [{rate} Sents/sec]\n")  # Toks/Sec???
        chunk_ix += 1
        # NOTE(review): hard stop after 10 chunks (100k sentences) looks like a
        # leftover debugging cap — it silently truncates larger corpora. Kept
        # for behavior compatibility; confirm whether it should be removed.
        if chunk_ix == 10:
            break
    end = time.time()
    logger.info(f"Processing File {args.corpus_name} took {(end - start)} seconds!")


if __name__ == "__main__":
    main()