Added SpaCy
Change-Id: Ic87af300ee2f557db9b065e023f11e4f9bbb9ffa
diff --git a/spacy/spacy_tok.py b/spacy/spacy_tok.py
new file mode 100644
index 0000000..62f2bff
--- /dev/null
+++ b/spacy/spacy_tok.py
@@ -0,0 +1,20 @@
+import sys
+
+from spacy.lang.de import German
+
+nlp = German()
+
+# Use the tokenizer that comes with the default settings for German
+# (not English -- this pipeline is spacy.lang.de), including its
+# punctuation rules and tokenizer exceptions.
+tokenizer = nlp.tokenizer
+
+# Read the input file given as the first CLI argument; decode as UTF-8
+# explicitly so non-ASCII German characters (umlauts, ß) are handled
+# correctly regardless of the platform's default encoding.
+with open(sys.argv[1], 'r', encoding='utf-8') as f:
+    contents = f.read()
+
+# Tokenize and print one token per line; done outside the `with` block
+# since the file handle is no longer needed once contents are read.
+for t in tokenizer(contents):
+    print(t)