# TODO: write a client to make multiple requests to the server!
import subprocess, json, time
import requests, glob, logging
import os.path, sys
from lib.CoNLL_Annotation import get_token_type
import my_utils.file_utils as fu
import argparse

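
# --- Sketch (not part of the original script): a minimal single-request client ---
# The TODO above asks for a client that makes multiple requests to the parser
# server. The helper below is a rough, hedged sketch of what one such request
# could look like; the request logic actually used by this script lives in
# my_utils.file_utils.turku_parse_file. The function name, the server URL and
# the "POST raw text, receive CoNLL-U" interface are assumptions, not something
# confirmed by this repository.
def send_chunk_to_parser_sketch(raw_text, server_url="http://localhost:7689"):
    """Send one chunk of raw text to a parser server and return its CoNLL-U output."""
    response = requests.post(server_url, data=raw_text.encode("utf-8"))
    response.raise_for_status()  # fail loudly if the server rejects the chunk
    return response.text
# Hypothetical usage: conllu_chunk = send_chunk_to_parser_sketch("Das ist ein Test .")
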
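
# --- Sketch (not part of the original script): what the file_utils helpers might do ---
# The processing loop below relies on my_utils.file_utils.file_generator and
# my_utils.file_utils.get_file_chunk. The two functions here are only a rough
# illustration of that behavior (lazy line reading; chunking on blank-line
# sentence boundaries); the real implementations in my_utils/file_utils.py may
# differ. The _sketch_ names are hypothetical and are never called by this script.
def _sketch_file_generator(file_path):
    """Lazily yield the input corpus one line at a time."""
    with open(file_path, encoding="utf-8") as f:
        for line in f:
            yield line


def _sketch_get_file_chunk(line_generator, chunk_size, token_class=None, comment_str="#"):
    """Collect up to chunk_size sentences; return (raw_text, file_has_next, n_sents).

    token_class and comment_str mirror the real call signature but are unused in
    this simplified sketch (comment lines are passed through unchanged).
    """
    lines, n_sents = [], 0
    for line in line_generator:
        lines.append(line)
        if line.strip() == "":  # a blank line closes a sentence in CoNLL formats
            n_sents += 1
            if n_sents >= chunk_size:
                return "".join(lines), True, n_sents
    return "".join(lines), False, n_sents  # generator exhausted: last chunk

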
if __name__ == "__main__":

    """
    EXECUTE:

    python systems/parse_turku.py --corpus_name DE_GSD --gld_token_type CoNLLUP_Token \
        -i /home/daza/datasets/ud-treebanks-v2.2/UD_German-GSD/de_gsd-ud-test.conllu

    python systems/parse_turku.py --corpus_name TigerTestOld --gld_token_type CoNLLUP_Token \
        -i /home/daza/datasets/TIGER_conll/data_splits/test/Tiger.OldOrth.test.conll
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input_file", help="Input Corpus", required=True)
    parser.add_argument("-o", "--output_file", help="Output Parsed Corpus", default=None)
    parser.add_argument("-n", "--corpus_name", help="Corpus Name", default="Corpus")
    parser.add_argument("-gtt", "--gld_token_type", help="CoNLL Format of the Gold Data", default="CoNLL09_Token")
    parser.add_argument("-c", "--comment_str", help="String that marks comment lines inside the file", default="#")
    args = parser.parse_args()

    file_has_next, chunk_ix = True, 0
    CHUNK_SIZE = 10000

    output_file = args.output_file if args.output_file else args.input_file

    # =====================================================================================
    # LOGGING INFO ...
    # =====================================================================================
    logger = logging.getLogger(__name__)
    console_hdlr = logging.StreamHandler(sys.stdout)
    file_hdlr = logging.FileHandler(filename=f"logs/Parse_{args.corpus_name}_Turku.log")
    logging.basicConfig(level=logging.INFO, handlers=[console_hdlr, file_hdlr])
    logger.info(f"Chunking {args.corpus_name} corpus in chunks of {CHUNK_SIZE} sentences")

    # =====================================================================================
    # PROCESS (PARSE) the Input Corpus ...
    # =====================================================================================
    start = time.time()
    total_processed_sents = 0
    line_generator = fu.file_generator(args.input_file)
    while file_has_next:
        raw_text, file_has_next, n_sents = fu.get_file_chunk(line_generator, chunk_size=CHUNK_SIZE, token_class=get_token_type(args.gld_token_type), comment_str=args.comment_str)
        total_processed_sents += n_sents
        if len(raw_text) > 0:
            fu.turku_parse_file(raw_text, output_file, chunk_ix)
        now = time.time()
        elapsed = (now - start)
        logger.info(f"Time Elapsed: {elapsed}. Processed {total_processed_sents}. [{total_processed_sents/elapsed} Sents/sec]\n")  # Toks/Sec???
        chunk_ix += 1
        if chunk_ix == 10: break  # NOTE: stops after 10 chunks (10 * CHUNK_SIZE sentences); remove to parse the whole corpus
    end = time.time()
    logger.info(f"Processing File {args.corpus_name} took {(end - start)} seconds!")