- Remove all thrown exceptions; errors are now passed on in the serialization instead
- Bugfix: wrong class numbers in references
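
As a rough illustration of the first point: instead of throwing (e.g. a
QueryException), the processors now record problems via addError()/addWarning()
and leave an empty query map, so callers read the collected errors after
processing. The following is only a minimal sketch of that pattern; the class
name, status code and messages are made up for illustration and are not the
actual KorAP-internal API.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch only: collect errors instead of throwing.
    class SketchQueryProcessor {
        private final List<List<Object>> errors = new ArrayList<List<Object>>();
        private final Map<String, Object> requestMap = new LinkedHashMap<String, Object>();

        void addError(int code, String msg) {
            // record the error instead of throwing an exception
            errors.add(Arrays.asList(new Object[] { code, msg }));
        }

        void process(String query) {
            if (query == null || query.isEmpty()) {
                addError(999, "Could not parse query."); // hypothetical code/message
                // keep serialization going with an empty query object
                requestMap.put("query", new LinkedHashMap<String, Object>());
                return;
            }
            requestMap.put("query", query); // stands in for real tree processing
        }

        List<List<Object>> getErrors() { return errors; }
        Map<String, Object> getRequestMap() { return requestMap; }

        public static void main(String[] args) {
            SketchQueryProcessor p = new SketchQueryProcessor();
            p.process("");                      // malformed input: no exception thrown
            System.out.println(p.getErrors());  // caller inspects collected errors
            System.out.println(p.getRequestMap());
        }
    }
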
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java
index e6c4b4b..ca09dc7 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java
@@ -12,8 +12,6 @@
 
 public abstract class AbstractQueryProcessor {
 	
-	public abstract void process(String query);
-	
 	Logger log;
 	/**
 	 *  The query
@@ -60,6 +58,8 @@
 		requestMap.put("meta", new LinkedHashMap<String, Object>());
 	}
 	
+	public abstract void process(String query);
+	
 	public void addWarning(int code, String msg) {
 		List<Object> warning = Arrays.asList(new Object[]{code, msg}); 
 		warnings.add(warning);
@@ -92,4 +92,4 @@
 	public Map<String, Object> getRequestMap() {
 		return requestMap;
 	}
-}
+}
\ No newline at end of file
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java
index a823f9d..52c5c78 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java
@@ -77,18 +77,19 @@
 	 * spans to refer back to previously established classes for entities.
 	 */
 	private LinkedHashMap<String, Integer> refClassMapping = new LinkedHashMap<String, Integer>();
+	/**
+	 * Keeps track of the number of references to a node/token by means of #n. E.g. in the query 
+	 * <tt>tok="x" & tok="y" & tok="z" & #1 . #2 & #2 . #3</tt>, the 2nd token ("y") is referenced twice, the others once.
+	 */
 	private LinkedHashMap<String, Integer> nodeReferencesTotal = new LinkedHashMap<String, Integer>();
+	/**
+	 * Keeps track of the number of references to a node/token that have already been processed.
+	 */
 	private LinkedHashMap<String, Integer> nodeReferencesProcessed = new LinkedHashMap<String, Integer>();
 
-	/**
-	 * 
-	 * @param tree The syntax tree as returned by ANTLR
-	 * @param parser The ANTLR parser instance that generated the parse tree
-	 */
 	public AnnisQueryProcessor(String query) {
 		CqlfObjectGenerator.setQueryProcessor(this);
 		process(query);
-		System.out.println(">>> "+requestMap.get("query")+" <<<");
 	}
 
 	@Override
@@ -210,7 +211,7 @@
 				}
 			}
 		}
-		System.err.println(nodeVariables);
+		if (verbose) System.err.println(nodeVariables);
 	}
 
 	private void processUnary_linguistic_term(ParseTree node) {
@@ -311,7 +312,7 @@
 					operand = CqlfObjectGenerator.wrapInClass(operand, classCounter++);
 				} else if (nodeReferencesProcessed.get(ref)>0 && nodeReferencesTotal.get(ref)>1) {
 					try {
-						operand = CqlfObjectGenerator.wrapInReference(operandStack.pop(), refClassMapping.get(ref));
+						operand = CqlfObjectGenerator.wrapInReference(operandStack.pop(), refClassMapping.get(ref), true);
 					} catch (NoSuchElementException e) {
 						operand = CqlfObjectGenerator.makeReference(refClassMapping.get(ref));
 					}
@@ -365,7 +366,7 @@
 					innerOperands.add(CqlfObjectGenerator.wrapInClass(CqlfObjectGenerator.makeSpan(), classCounter));
 					// add the first operand and wrap the whole group in a focusing reference 
 					innerOperands.add(operand1);
-					innerGroup = CqlfObjectGenerator.wrapInReference(innerGroup, classCounter);
+					innerGroup = CqlfObjectGenerator.wrapInReference(innerGroup, classCounter, true);
 					outerOperands.add(innerGroup);
 				} else {
 					outerOperands.add(operandStack.pop());
@@ -378,7 +379,7 @@
 
 				// Wrap in another reference object in case other relations are following
 				if (i < node.getChildCount()-2) {
-					group = CqlfObjectGenerator.wrapInReference(group, classCounter);
+					group = CqlfObjectGenerator.wrapInReference(group, classCounter, true);
 				}
 				// All other n-ary linguistic relations have special 'relation' attributes defined in CQLF and can be
 				// handled more easily...
@@ -419,7 +420,7 @@
 				
 				// Wrap in reference object in case other relations are following
 				if (i < node.getChildCount()-2) {
-					group = CqlfObjectGenerator.wrapInReference(group, classCounter);
+					group = CqlfObjectGenerator.wrapInReference(group, classCounter, true);
 				}
 
 				// Inject operands.
@@ -714,20 +715,17 @@
 		Antlr4DescriptiveErrorListener errorListener = new Antlr4DescriptiveErrorListener(query);
 		// Like p. 111
 		try {
-
 			// Tokenize input data
 			ANTLRInputStream input = new ANTLRInputStream(query);
 			lexer.setInputStream(input);
 			CommonTokenStream tokens = new CommonTokenStream(lexer);
 			parser = new AqlParser(tokens);
-
 			// Don't throw out erroneous stuff
 			parser.setErrorHandler(new BailErrorStrategy());
 			lexer.removeErrorListeners();
             lexer.addErrorListener(errorListener);
             parser.removeErrorListeners();
             parser.addErrorListener(errorListener);
-
 			// Get starting rule from parser
 			Method startRule = AqlParser.class.getMethod("start"); 
 			tree = (ParserRuleContext) startRule.invoke(parser, (Object[])null);
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java
index 4faa5e8..822d022 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java
@@ -8,19 +8,22 @@
 import org.antlr.runtime.Parser;
 import org.antlr.runtime.tree.Tree;
 
-public abstract class Antlr3AbstractQueryProcessor extends AbstractQueryProcessor {
-	
-	protected Parser parser;
+public abstract class Antlr3AbstractQueryProcessor extends
+        AbstractQueryProcessor {
+
+    protected Parser parser;
 
     /**
-     * Returns the category (or 'label') of the root of a (sub-) ParseTree (ANTLR 3).
+     * Returns the category (or 'label') of the root of a (sub-)
+     * ParseTree (ANTLR 3).
      *
      * @param node
      * @return
      */
     protected static String getNodeCat(Tree node) {
         String nodeCat = node.toStringTree();
-        Pattern p = Pattern.compile("\\((.*?)\\s"); // from opening parenthesis to 1st whitespace
+        // from opening parenthesis to 1st whitespace
+        Pattern p = Pattern.compile("\\((.*?)\\s"); 
         Matcher m = p.matcher(node.toStringTree());
         if (m.find()) {
             nodeCat = m.group(1);
@@ -31,9 +34,12 @@
     /**
      * Tests whether a certain node has a child by a certain name
      *
-     * @param node     The parent node.
-     * @param childCat The category of the potential child.
-     * @return true iff one or more children belong to the specified category
+     * @param node
+     *            The parent node.
+     * @param childCat
+     *            The category of the potential child.
+     * @return true iff one or more children belong to the specified
+     *         category
      */
     protected static boolean hasChild(Tree node, String childCat) {
         for (int i = 0; i < node.getChildCount(); i++) {
@@ -43,7 +49,7 @@
         }
         return false;
     }
-    
+
     protected static boolean hasDescendantWithCat(Tree node, String childCat) {
         for (int i = 0; i < node.getChildCount(); i++) {
             Tree child = node.getChild(i);
@@ -74,21 +80,21 @@
         }
         return children;
     }
-    
+
     protected static Tree getFirstChildWithCat(Tree node, String nodeCat) {
         return getNthChildWithCat(node, nodeCat, 1);
     }
-    
+
     protected static Tree getNthChildWithCat(Tree node, String nodeCat, int n) {
-    	int counter = 0;
-    	for (int i = 0; i < node.getChildCount(); i++) {
-    		if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
-    			counter++;
-    			if (counter == n) {
-    				return node.getChild(i);
-    			}
-    		}
-    	}
+        int counter = 0;
+        for (int i = 0; i < node.getChildCount(); i++) {
+            if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
+                counter++;
+                if (counter == n) {
+                    return node.getChild(i);
+                }
+            }
+        }
         return null;
     }
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java
index 0cd2768..b76ad29 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java
@@ -117,7 +117,7 @@
             if (checkDateValidity(valueNode)) {
         		addWarning("The collection query contains a value that looks like a date ('"+valueNode.getText()+"')"
         				+ " and an operator that is only defined for strings ('"+match+"'). The value is interpreted as "
-        						+ "a string, use a date operator to ensure the value is treated as a date");            	
+        						+ "a string. Use a date operator to ensure the value is treated as a date");            	
             }
             putIntoSuperObject(term);
         }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java
index 2ab4ddb..53929e2 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java
@@ -29,1109 +29,1287 @@
  */
 public class Cosmas2QueryProcessor extends Antlr3AbstractQueryProcessor {
 
-	private static Logger log = LoggerFactory.getLogger(Cosmas2QueryProcessor.class);
+    private static Logger log = LoggerFactory
+            .getLogger(Cosmas2QueryProcessor.class);
 
-	LinkedList<LinkedHashMap[]> toWrapStack = new LinkedList<LinkedHashMap[]>();
-	/**
-	 * Field for repetition query (Kleene + or * operations, or min/max queries: {2,4}
-	 */
-	String repetition = "";
-	/**
-	 * Global control structure for fieldGroups, keeps track of open fieldGroups.
-	 */
-	LinkedList<ArrayList<Object>> openFieldGroups = new LinkedList<ArrayList<Object>>();
-	/**
-	 * Keeps track of how many toWrap objects there are to pop after every recursion of {@link #processNode(ParseTree)}
-	 */
-	LinkedList<Integer> toWrapsToPop = new LinkedList<Integer>();
-	/**
-	 * Flag that indicates whether token fields or meta fields are currently being processed
-	 */
-	boolean inMeta = false;
-	/**
-	 * If true, a sequence (OPPROX node) is governed by an OPALL node (ALL()-operator), which requires to match
-	 * all tokens of the sequence.
-	 */
-	boolean inOPALL = false;
-	boolean inOPNHIT = false;
-	/**
+    LinkedList<LinkedHashMap[]> toWrapStack = new LinkedList<LinkedHashMap[]>();
+    /**
+     * Field for repetition query (Kleene + or * operations, or
+     * min/max queries: {2,4})
+     */
+    String repetition = "";
+    /**
+     * Global control structure for fieldGroups, keeps track of open
+     * fieldGroups.
+     */
+    LinkedList<ArrayList<Object>> openFieldGroups = new LinkedList<ArrayList<Object>>();
+    /**
+     * Keeps track of how many toWrap objects there are to pop after
+     * every recursion of {@link #processNode(ParseTree)}
+     */
+    LinkedList<Integer> toWrapsToPop = new LinkedList<Integer>();
+    /**
+     * Flag that indicates whether token fields or meta fields are
+     * currently being processed
+     */
+    boolean inMeta = false;
+    /**
+     * If true, a sequence (OPPROX node) is governed by an OPALL node
+     * (ALL()-operator), which requires to match all tokens of the
+     * sequence.
+     */
+    boolean inOPALL = false;
+    boolean inOPNHIT = false;
+    /**
 	 *
 	 */
-	int classCounter = 1;
-	boolean negate = false;
+    int classCounter = 1;
+    boolean negate = false;
 
-	/**
-	 * Allows for the definition of objects to be wrapped around the arguments of an operation.
-	 * Each record in the table holds the parent node of the argument, the number of the argument 
-	 * and an object in whose operands list the argument shall be wrapped.
-	 */
-	Table<Tree,Integer,LinkedHashMap<String,Object>> operandWrap = HashBasedTable.create();
+    /**
+     * Allows for the definition of objects to be wrapped around the
+     * arguments of an operation. Each record in the table holds the
+     * parent node of the argument, the number of the argument and an
+     * object in whose operands list the argument shall be wrapped.
+     */
+    Table<Tree, Integer, LinkedHashMap<String, Object>> operandWrap = HashBasedTable
+            .create();
 
-	/**
-	 * Keeps track of all visited nodes in a tree
-	 */
-	List<Tree> visited = new ArrayList<Tree>();
+    /**
+     * Keeps track of all visited nodes in a tree
+     */
+    List<Tree> visited = new ArrayList<Tree>();
 
-	Integer stackedToWrap = 0;
-	/**
-	 * A list of node categories that can be sequenced (i.e. which can be in a sequence with any number of other nodes in this list)
-	 */
-	private final List<String> sequentiableNodeTypes = Arrays.asList(new String[]{"OPWF", "OPLEM", "OPMORPH", "OPBEG", "OPEND", "OPIN", "OPBED", "OPELEM", "OPOR", "OPAND"});
-	/**
-	 * Keeps track of sequenced nodes, i.e. nodes that implicitly govern  a sequence, as in (C2PQ (OPWF der) (OPWF Mann)).
-	 * This is necessary in order to know when to take the sequence off the object stack, as the sequence is introduced by the
-	 * first child but cannot be closed after this first child in order not to lose its siblings
-	 */
-	private LinkedList<Tree> sequencedNodes = new LinkedList<Tree>();
+    Integer stackedToWrap = 0;
+    /**
+     * A list of node categories that can be sequenced (i.e. which can
+     * be in a sequence with any number of other nodes in this list)
+     */
+    private final List<String> sequentiableNodeTypes = Arrays
+            .asList(new String[] { "OPWF", "OPLEM", "OPMORPH", "OPBEG",
+                    "OPEND", "OPIN", "OPBED", "OPELEM", "OPOR", "OPAND" });
+    /**
+     * Keeps track of sequenced nodes, i.e. nodes that implicitly
+     * govern a sequence, as in (C2PQ (OPWF der) (OPWF Mann)). This is
+     * necessary in order to know when to take the sequence off the
+     * object stack, as the sequence is introduced by the first child
+     * but cannot be closed after this first child in order not to
+     * lose its siblings
+     */
+    private LinkedList<Tree> sequencedNodes = new LinkedList<Tree>();
 
-	private boolean nodeHasSequentiableSiblings;
+    private boolean nodeHasSequentiableSiblings;
 
-	/**
-	 * Keeps track of operands lists that are to be serialised in an inverted
-	 * order (e.g. the IN() operator) compared to their AST representation.
-	 */
-	private LinkedList<ArrayList<Object>> invertedOperandsLists = new LinkedList<ArrayList<Object>>();
-	/**
-	 * @param tree   The syntax tree as returned by ANTLR
-	 * @param parser The ANTLR parser instance that generated the parse tree
-	 * @throws QueryException
-	 */
-	public Cosmas2QueryProcessor(String query) {
-		CqlfObjectGenerator.setQueryProcessor(this);
-		this.query = query;
-		process(query);
-		log.info(">>> " + requestMap.get("query") + " <<<");
-	}
+    /**
+     * Keeps track of operands lists that are to be serialised in an
+     * inverted order (e.g. the IN() operator) compared to their AST
+     * representation.
+     */
+    private LinkedList<ArrayList<Object>> invertedOperandsLists = new LinkedList<ArrayList<Object>>();
 
-	@Override
-	public void process(String query) {
-		Tree tree = null;
-		tree = parseCosmasQuery(query);
-		log.info("Processing CosmasII query: "+query);
-		if (tree != null) {
-			log.debug("ANTLR parse tree: "+tree.toStringTree());
-			processNode(tree);
-		}
-	}
+    /**
+     * @param tree
+     *            The syntax tree as returned by ANTLR
+     * @param parser
+     *            The ANTLR parser instance that generated the parse
+     *            tree
+     * @throws QueryException
+     */
+    public Cosmas2QueryProcessor (String query) {
+        CqlfObjectGenerator.setQueryProcessor(this);
+        this.query = query;
+        process(query);
+        log.info(">>> " + requestMap.get("query") + " <<<");
+    }
 
-	private void processNode(Tree node) {
-		// Top-down processing
-		if (visited.contains(node)) return;
-		else visited.add(node);
+    @Override
+    public void process(String query) {
+        Tree tree = null;
+        tree = parseCosmasQuery(query);
+        log.info("Processing CosmasII query: " + query);
+        if (tree != null) {
+            log.debug("ANTLR parse tree: " + tree.toStringTree());
+            processNode(tree);
+        }
+    }
 
+    private void processNode(Tree node) {
+        // Top-down processing
+        if (visited.contains(node))
+            return;
+        else
+            visited.add(node);
 
-		String nodeCat = getNodeCat(node);
-		openNodeCats.push(nodeCat);
+        String nodeCat = getNodeCat(node);
+        openNodeCats.push(nodeCat);
 
-		stackedObjects = 0;
-		stackedToWrap = 0;
+        stackedObjects = 0;
+        stackedToWrap = 0;
 
-		if (verbose) {
-			System.err.println(" " + objectStack);
-			System.out.println(openNodeCats);
-		}
+        if (verbose) {
+            System.err.println(" " + objectStack);
+            System.out.println(openNodeCats);
+        }
 
+        /* ***************************************
+         * Processing individual node categories *
+         * ***************************************
+         */
 
-		/* ***************************************
-		 * Processing individual node categories *
-		 *****************************************/
-
-
-		// Check for potential implicit sequences as in (C2PQ (OPWF der) (OPWF Mann)). The sequence is introduced
-		// by the first child if it (and its siblings) is sequentiable.
-		if (sequentiableNodeTypes.contains(nodeCat)) {
-			// for each node, check if parent has more than one child (-> could be implicit sequence)
-			Tree parent = node.getParent();
-			if (parent.getChildCount() > 1) {
-				// if node is first child of parent...
-				if (node == parent.getChild(0)) {
-					nodeHasSequentiableSiblings = false;
-					for (int i = 1; i < parent.getChildCount(); i++) {
-						if (sequentiableNodeTypes.contains(getNodeCat(parent.getChild(i)))) {
-							nodeHasSequentiableSiblings = true;
-							continue;
-						}
-					}
-					if (nodeHasSequentiableSiblings) {
-						// Step I: create sequence
-						LinkedHashMap<String, Object> sequence = new LinkedHashMap<String, Object>();
-						sequence.put("@type", "korap:group");
-						sequence.put("operation", "operation:sequence");
-						sequence.put("operands", new ArrayList<Object>());
-						// push sequence on object stack but don't increment stackedObjects counter since
-						// we've got to wait until the parent node is processed - therefore, add the parent
-						// to the sequencedNodes list and remove the sequence from the stack when the parent
-						// has been processed
-						objectStack.push(sequence);
-						sequencedNodes.push(parent);
-						// Step II: decide where to put sequence
-						putIntoSuperObject(sequence, 1);
-					}
-				}
-			}
-		}
-		
-		if (nodeCat.equals("OPWF") || nodeCat.equals("OPLEM")) {
-			processOPWF_OPLEM(node);
-		}
-
-		if (nodeCat.equals("OPMORPH")) {
-			processOPMORPH(node);
-		}
-
-		if (nodeCat.equals("OPELEM")) {
-			processOPELEM(node);
-		}
-
-		if (nodeCat.equals("OPLABEL")) {
-			processOPLABEL(node);
-		}
-
-		if (nodeCat.equals("OPAND") || nodeCat.equals("OPNOT")) {
-			processOPAND_OPNOT(node);
-		}
-
-		if (nodeCat.equals("OPOR")) {
-			processOPOR(node);
-		}
-
-		if (nodeCat.equals("OPPROX")) {
-			processOPPROX(node);
-		}
-
-		// inlcusion or overlap
-		if (nodeCat.equals("OPIN") || nodeCat.equals("OPOV")) {
-			processOPIN_OPOV(node);
-		}
-
-		// Wrap the argument of an #IN operator in a previously defined container
-		if (nodeCat.equals("ARG1") || nodeCat.equals("ARG2"))  {
-			processARG1_ARG2(node);
-		}
-
-		if (nodeCat.equals("OPALL")) {
-			inOPALL = true;
-		}
-
-		if (nodeCat.equals("OPNHIT")) {
-			processOPNHIT(node);
-		}
-
-		if (nodeCat.equals("OPEND") || nodeCat.equals("OPBEG")) {
-			processOPEND_OPBEG(node);
-		}
-
-		if (nodeCat.equals("OPBED")) { 
-			processOPBED(node);
-		}
-		objectsToPop.push(stackedObjects);
-		toWrapsToPop.push(stackedToWrap);
-
-		/*
-		 ****************************************************************
-		 **************************************************************** 
-		 *  recursion until 'request' node (root of tree) is processed  *
-		 ****************************************************************
-		 ****************************************************************
-		 */
-		for (int i = 0; i < node.getChildCount(); i++) {
-			Tree child = node.getChild(i);
-			processNode(child);
-		}
-
-		/*
-		 **************************************************************
-		 * Stuff that happens after processing the children of a node *
-		 **************************************************************
-		 */
-
-		// remove sequence from object stack if node is implicitly sequenced
-		if (sequencedNodes.size() > 0) {
-			if (node == sequencedNodes.getFirst()) {
-				objectStack.pop();
-				sequencedNodes.pop();
-			}
-		}
-
-		for (int i = 0; i < objectsToPop.get(0); i++) {
-			objectStack.pop();
-		}
-		objectsToPop.pop();
-
-
-		//        if (!toWrapStack.isEmpty()) System.err.println(toWrapStack.get(0)[0]);
-		for (int i = 0; i < toWrapsToPop.get(0); i++) {
-			putIntoSuperObject(wrap(toWrapStack.pop()));
-		}
-		toWrapsToPop.pop();
-
-		if (nodeCat.equals("ARG2") && openNodeCats.get(1).equals("OPNOT")) {
-			negate = false;
-		}
-
-		if (nodeCat.equals("OPALL")) {
-			inOPALL = false;
-		}
-
-		openNodeCats.pop();
-	}
-
-	private void processOPEND_OPBEG(Tree node) {
-		// Step I: create group
-		String nodeCat = getNodeCat(node);
-		LinkedHashMap<String, Object> beggroup = new LinkedHashMap<String, Object>();
-		beggroup.put("@type", "korap:reference");
-		beggroup.put("operation", "operation:focus");
-		ArrayList<Integer> spanRef = new ArrayList<Integer>();
-		if (nodeCat.equals("OPBEG")) {
-			spanRef.add(0);
-			spanRef.add(1);
-		} else if (nodeCat.equals("OPEND")) {
-			spanRef.add(-1);
-			spanRef.add(1);
-		}
-		beggroup.put("spanRef", spanRef);
-		beggroup.put("operands", new ArrayList<Object>());
-		objectStack.push(beggroup);
-		stackedObjects++;
-
-		// Step II: decide where to put
-		putIntoSuperObject(beggroup, 1);
-	}
-
-	private void processOPBED(Tree node) {
-		// Node structure is (OPBED X+ (OPTS (TPBEG tpos*) (TPEND tpos*)))   
-		// X is some segment, TPBEG or TPEND must be present (inclusive OR)
-		// tpos is a three-char string of the form "[+-]?[spt][ae]". s/p/t indicates span, a/e beginning/end, - means negation
-		// See C-II QL documentation for more detail: 
-		// http://www.ids-mannheim.de/cosmas2/win-app/hilfe/suchanfrage/eingabe-grafisch/syntax/textpositionen.html
-		
-		// Step I: create group
-		int optsChild = node.getChildCount() - 1;
-		Tree begConditions = getFirstChildWithCat(node.getChild(optsChild), "TPBEG");
-		Tree endConditions = getFirstChildWithCat(node.getChild(optsChild), "TPEND");
-
-		LinkedHashMap<String, Object> submatchgroup = CqlfObjectGenerator.makeReference(128+classCounter);
-		ArrayList<Object> submatchOperands = new ArrayList<Object>();
-		submatchgroup.put("operands", submatchOperands);
-		putIntoSuperObject(submatchgroup);
-
-		// Step II: collect all conditions, create groups for them in processPositionCondition()
-		ArrayList<Object> distributedOperands = new ArrayList<Object>();
-		ArrayList<LinkedHashMap<String, Object>> conditionGroups = new ArrayList<LinkedHashMap<String, Object>>(); 
-		if (begConditions != null) {
-			for (Tree condition : getChildren(begConditions)) {
-				conditionGroups.add(processPositionCondition(condition, distributedOperands, "beg"));
-			}
-		}
-		if (endConditions != null) {
-			for (Tree condition : getChildren(endConditions)) {
-				conditionGroups.add(processPositionCondition(condition, distributedOperands, "end"));
-			}
-		}
-		// Step III: insert conditions. need to stack matches-groups because position groups may only have two operands
-		ArrayList<Object> currentLowestOperands = submatchOperands; // indicates where to insert next condition group
-		int conditionCount = 0;
-		for (LinkedHashMap<String,Object> conditionGroup : conditionGroups) {
-			conditionCount++;
-			if (conditionGroups.size()==1) {
-				submatchOperands.add(conditionGroup);
-			} else if (conditionCount < conditionGroups.size()) {
-				LinkedHashMap<String,Object> matchesGroup = CqlfObjectGenerator.makePosition(new String[]{"frames:matches"}, new String[0]);
-				@SuppressWarnings("unchecked")
-				ArrayList<Object> matchesOperands = (ArrayList<Object>) matchesGroup.get("operands");
-				matchesOperands.add(conditionGroup);
-				// matches groups that are embedded at the second or lower level receive an additional
-				// focus to grep out only the query term to which the constraint applies
-				if (conditionCount > 1) {
-					LinkedHashMap<String,Object> focus = CqlfObjectGenerator.makeReference(128+classCounter-conditionGroups.size()+conditionCount-1);
-					ArrayList<Object> focusOperands = new ArrayList<Object>();
-					focus.put("operands", focusOperands);
-					focusOperands.add(matchesGroup);
-					currentLowestOperands.add(focus);
-				} else {
-					currentLowestOperands.add(matchesGroup);
-				}
-				currentLowestOperands = matchesOperands;
-			} else {
-				currentLowestOperands.add(conditionGroup);
-			}
-		}
-	}
-
-	private void processOPNHIT(Tree node) {
-		Integer[] classRef = new Integer[]{128+classCounter+1, 128+classCounter+2}; 
-		//            classRef.add(classCounter + 1);  // yes, do this twice (two classes)!
-		LinkedHashMap<String, Object> group = CqlfObjectGenerator.makeReference(128+classCounter);
-		LinkedHashMap<String, Object> classRefCheck = CqlfObjectGenerator.makeClassRefOp("classRefOp:inversion", classRef, classCounter+128);
-		ArrayList<Object> operands = new ArrayList<Object>();
-		operands.add(classRefCheck);
-		group.put("operands", operands);
-		classCounter++;
-		wrapOperandInClass(node.getChild(0),1,classCounter++); // direct child is OPPROX
-		wrapOperandInClass(node.getChild(0),2,classCounter++);
-		objectStack.push(classRefCheck);
-		stackedObjects++;
-		putIntoSuperObject(group, 1);
-	}
-
-	private void processARG1_ARG2(Tree node) {
-		String nodeCat = getNodeCat(node);
-		Tree parent = node.getParent();
-		if (operandWrap.containsRow(parent)) {
-			// Step I: create group
-			int argNr = nodeCat.equals("ARG1") ? 1 : 2;
-			LinkedHashMap<String,Object> container = operandWrap.row(parent).get(argNr);
-			// Step II: ingest
-			if (container!=null) {
-				objectStack.push(container);
-				stackedObjects++;
-				putIntoSuperObject(container,1);
-			}
-		}
-	}
-	
-
-	@SuppressWarnings("unchecked")
-	private void processOPIN_OPOV(Tree node) {
-		// Step I: create group
-		String nodeCat = getNodeCat(node);
-		wrapOperandInClass(node,2,classCounter++);
-		wrapOperandInClass(node,1,classCounter++);
-		//            LinkedHashMap<String, Object> posgroup = makePosition(null);
-		LinkedHashMap<String, Object> posgroup = CqlfObjectGenerator.makeGroup("position");
-		LinkedHashMap<String, Object> positionOptions;
-		//            posgroup
-		if (nodeCat.equals("OPIN")) {
-			positionOptions = parseOPINOptions(node);
-		} else {
-			positionOptions = parseOPOVOptions(node);
-		}
-		posgroup.put("frames", positionOptions.get("frames"));
-		posgroup.put("frame", positionOptions.get("frame"));
-		if (positionOptions.containsKey("exclude")) {
-			posgroup.put("exclude", positionOptions.get("exclude"));
-		}
-		if (positionOptions.containsKey("grouping")) {
-			posgroup.put("grouping", positionOptions.get("grouping"));
-		}
-		objectStack.push(posgroup);
-		// mark this an inverted operands object
-		invertedOperandsLists.push((ArrayList<Object>) posgroup.get("operands"));
-		stackedObjects++;
-		// Step II: wrap in reference and decide where to put
-		ArrayList<String> check = (ArrayList<String>) positionOptions.get("classRefCheck");
-		Integer[] classIn = new Integer[]{128+classCounter-2,128+classCounter-1};
-		LinkedHashMap<String, Object> classRefCheck = CqlfObjectGenerator.makeClassRefCheck(check, classIn, 128+classCounter);
-		((ArrayList<Object>) classRefCheck.get("operands")).add(posgroup);
-		LinkedHashMap<String, Object> focusGroup = null;
-		if ((boolean) positionOptions.get("matchall") == true) {
-			focusGroup = CqlfObjectGenerator.makeResetReference();
-			((ArrayList<Object>) focusGroup.get("operands")).add(classRefCheck);
-		} else { // match only first argument
-			focusGroup = CqlfObjectGenerator.wrapInReference(classRefCheck, 128+classCounter-1);
-		}
-		putIntoSuperObject(focusGroup, 1);
-	}
-
-	@SuppressWarnings("unchecked")
-	private void processOPPROX(Tree node) {
-		// collect info
-		Tree prox_opts = node.getChild(0);
-		Tree typ = prox_opts.getChild(0);
-		Tree dist_list = prox_opts.getChild(1);
-		// Step I: create group
-		LinkedHashMap<String, Object> group = CqlfObjectGenerator.makeGroup("sequence");
-
-		ArrayList<Object> constraints = new ArrayList<Object>();
-		boolean exclusion = typ.getChild(0).toStringTree().equals("EXCL");
-
-		boolean inOrder = false;
-		boolean invertedOperands = false;
-
-		group.put("inOrder", inOrder);
-		group.put("distances", constraints);
-
-		boolean putIntoOverlapDisjunction = false;
-
-		int min = 0, max = 0;
-		// possibly several distance constraints
-		for (int i = 0; i < dist_list.getChildCount(); i++) {
-			String direction = dist_list.getChild(i).getChild(0).getChild(0).toStringTree().toLowerCase();
-			String minStr = dist_list.getChild(i).getChild(1).getChild(0).toStringTree();
-			String maxStr = dist_list.getChild(i).getChild(1).getChild(1).toStringTree();
-			String meas = dist_list.getChild(i).getChild(2).getChild(0).toStringTree();
-			if (minStr.equals("VAL0")) {
-				minStr = "0";
-			}
-			min = Integer.parseInt(minStr);
-			max = Integer.parseInt(maxStr);
-			// If zero word-distance, wrap this sequence in a disjunction along with an overlap position
-			// between the two operands
-			/*   
- 	XXX: This is currently deactivated. Uncomment to activate treatment of zero-word distances as overlap-alternatives
- 			(see google doc on special distances serialization)
-
-            if (meas.equals("w") && min == 0) {
-            	min = 1;
-            	putIntoOverlapDisjunction = true;
+        // Check for potential implicit sequences as in (C2PQ (OPWF
+        // der) (OPWF Mann)). The sequence is introduced
+        // by the first child if it (and its siblings) is
+        // sequentiable.
+        if (sequentiableNodeTypes.contains(nodeCat)) {
+            // for each node, check if parent has more than one child
+            // (-> could be implicit sequence)
+            Tree parent = node.getParent();
+            if (parent.getChildCount() > 1) {
+                // if node is first child of parent...
+                if (node == parent.getChild(0)) {
+                    nodeHasSequentiableSiblings = false;
+                    for (int i = 1; i < parent.getChildCount(); i++) {
+                        if (sequentiableNodeTypes.contains(getNodeCat(parent
+                                .getChild(i)))) {
+                            nodeHasSequentiableSiblings = true;
+                            continue;
+                        }
+                    }
+                    if (nodeHasSequentiableSiblings) {
+                        // Step I: create sequence
+                        LinkedHashMap<String, Object> sequence = new LinkedHashMap<String, Object>();
+                        sequence.put("@type", "korap:group");
+                        sequence.put("operation", "operation:sequence");
+                        sequence.put("operands", new ArrayList<Object>());
+                        // push sequence on object stack but don't
+                        // increment stackedObjects counter since
+                        // we've got to wait until the parent node is
+                        // processed - therefore, add the parent
+                        // to the sequencedNodes list and remove the
+                        // sequence from the stack when the parent
+                        // has been processed
+                        objectStack.push(sequence);
+                        sequencedNodes.push(parent);
+                        // Step II: decide where to put sequence
+                        putIntoSuperObject(sequence, 1);
+                    }
+                }
             }
-			 */
-			if (!meas.equals("w") && min == 0 ) {
-				processSpanDistance(meas,min,max);
-			}
-			LinkedHashMap<String, Object> distance = CqlfObjectGenerator.makeDistance(meas,min,max);
-			if (exclusion) {
-				distance.put("exclude", true);
-			}
-			//                if (! openNodeCats.get(1).equals("OPNHIT")) {
-			constraints.add(distance);
-			//                }
-			if (i==0) {
-				if (direction.equals("plus")) {
-					inOrder = true;
-				} else if (direction.equals("minus")) {
-					inOrder = true;
-					invertedOperands = true;
-				} else if (direction.equals("both")) {
-					inOrder = false;
-				}
-			}
-		}
-		group.put("inOrder", inOrder);
-		LinkedHashMap<String, Object> embeddedSequence = group;
+        }
 
-		if (! (openNodeCats.get(1).equals("OPBEG") || openNodeCats.get(1).equals("OPEND") || inOPALL || openNodeCats.get(1).equals("OPNHIT"))) {
-			wrapOperandInClass(node,1,classCounter);
-			wrapOperandInClass(node,2,classCounter);
-			group = CqlfObjectGenerator.wrapInReference(group, 128+classCounter++);
-		} else if (openNodeCats.get(1).equals("OPNHIT")) {
-			LinkedHashMap<String,Object> repetition = CqlfObjectGenerator.makeRepetition(min, max);
-			((ArrayList<Object>) repetition.get("operands")).add(CqlfObjectGenerator.makeToken());
-			// TODO go on with this: put the repetition into a class and put it in between the operands
-			// -> what if there's several distance constraints. with different keys, like /w4,s0? 
-		}
+        if (nodeCat.equals("OPWF") || nodeCat.equals("OPLEM")) {
+            processOPWF_OPLEM(node);
+        }
 
-		LinkedHashMap<String,Object> sequence = null;
-		if (putIntoOverlapDisjunction) {
-			sequence = embeddedSequence;
-			group = CqlfObjectGenerator.makeGroup("or");
-			ArrayList<Object> disjOperands = (ArrayList<Object>) group.get("operands");
-			String[] sharedClasses = new String[]{"intersects"};
-			LinkedHashMap<String,Object> overlapsGroup = CqlfObjectGenerator.makePosition(new String[0], sharedClasses);
+        if (nodeCat.equals("OPMORPH")) {
+            processOPMORPH(node);
+        }
 
-			ArrayList<Object> overlapsOperands = (ArrayList<Object>) overlapsGroup.get("operands");
-			// this ensures identity of the operands lists and thereby a distribution of the operands for both created objects 
-			sequence.put("operands", overlapsOperands);
-			if (invertedOperands) {
-				invertedOperandsLists.push(overlapsOperands);
-			}
-			disjOperands.add(overlapsGroup);
-			disjOperands.add(CqlfObjectGenerator.wrapInReference(sequence, 0));
-			// Step II: decide where to put
-			putIntoSuperObject(group, 0);
-			objectStack.push(sequence);
-		}
-		else {
-			if (invertedOperands) {
-				ArrayList<Object> operands = (ArrayList<Object>) embeddedSequence.get("operands");
-				invertedOperandsLists.push(operands);
-			}
-			// Step II: decide where to put
-			putIntoSuperObject(group, 0);
-			objectStack.push(embeddedSequence);
-		}
-		stackedObjects++;
-		visited.add(node.getChild(0));
-	}
+        if (nodeCat.equals("OPELEM")) {
+            processOPELEM(node);
+        }
 
-	private void processOPOR(Tree node) {
-		// Step I: create group
-		LinkedHashMap<String, Object> disjunction = new LinkedHashMap<String, Object>();
-		disjunction.put("@type", "korap:group");
-		disjunction.put("operation", "operation:or");
-		disjunction.put("operands", new ArrayList<Object>());
-		objectStack.push(disjunction);
-		stackedObjects++;
-		// Step II: decide where to put
-		putIntoSuperObject(disjunction, 1);
-	}
+        if (nodeCat.equals("OPLABEL")) {
+            processOPLABEL(node);
+        }
 
-	private void processOPAND_OPNOT(Tree node) {
-		// Step I: create group
-		String nodeCat = getNodeCat(node);
-		LinkedHashMap<String, Object> distgroup = new LinkedHashMap<String, Object>();
-		distgroup.put("@type", "korap:group");
-		distgroup.put("operation", "operation:sequence");
-		ArrayList<Object> distances = new ArrayList<Object>();
-		LinkedHashMap<String, Object> zerodistance = new LinkedHashMap<String, Object>();
-		zerodistance.put("@type", "cosmas:distance");
-		zerodistance.put("key", "t");
-		zerodistance.put("min", 0);
-		zerodistance.put("max", 0);
-		if (nodeCat.equals("OPNOT")) zerodistance.put("exclude", true);
-		distances.add(zerodistance);
-		distgroup.put("distances", distances);
-		distgroup.put("operands", new ArrayList<Object>());
-		objectStack.push(distgroup);
-		stackedObjects++;
-		// Step II: decide where to put
-		putIntoSuperObject(distgroup, 1);
-	}
+        if (nodeCat.equals("OPAND") || nodeCat.equals("OPNOT")) {
+            processOPAND_OPNOT(node);
+        }
 
-	private void processOPLABEL(Tree node) {
-		// Step I: create element
-		LinkedHashMap<String, Object> elem = new LinkedHashMap<String, Object>();
-		elem.put("@type", "korap:span");
-		elem.put("key", node.getChild(0).toStringTree().replaceAll("<|>", ""));
-		//Step II: decide where to put
-		putIntoSuperObject(elem);
-	}
+        if (nodeCat.equals("OPOR")) {
+            processOPOR(node);
+        }
 
-	@SuppressWarnings("unchecked")
-	private void processOPELEM(Tree node) {
-		// Step I: create element
-		LinkedHashMap<String, Object> span = CqlfObjectGenerator.makeSpan();
-		if (node.getChild(0).toStringTree().equals("EMPTY")) {
+        if (nodeCat.equals("OPPROX")) {
+            processOPPROX(node);
+        }
 
-		} else {
-			int elname = 0;
-			Tree elnameNode = getFirstChildWithCat(node, "ELNAME");
-			if (elnameNode != null) {
-				span.put("key", elnameNode.getChild(0).toStringTree().toLowerCase());
-				elname = 1;
-			}
-			if (node.getChildCount() > elname) {
-				/*
-				 * Attributes can carry several values, like #ELEM(W ANA != 'N V'), 
-				 * denoting a word whose POS is neither N nor V.
-				 * When seeing this, create a sub-termGroup and put it into the top-level
-				 * term group, but only if there are other attributes in that group. If
-				 * not, put the several values as distinct attr-val-pairs into the
-				 * top-level group (in order to avoid a top-level group that only
-				 * contains a sub-group).
-				 */
-				LinkedHashMap<String, Object> termGroup = CqlfObjectGenerator.makeTermGroup("and");
-				ArrayList<Object> termGroupOperands = (ArrayList<Object>) termGroup.get("operands");
-				for (int i = elname; i < node.getChildCount(); i++) {
-					Tree attrNode = node.getChild(i);
-					if (attrNode.getChildCount() == 2) {
-						LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
-						termGroupOperands.add(term);
-						String layer = attrNode.getChild(0).toStringTree();
-						String[] splitted = layer.split("/");
-						if (splitted.length > 1) {
-							term.put("foundry", splitted[0]);
-							layer = splitted[1];
-						}
-						term.put("layer", translateMorph(layer));
-						term.put("key", attrNode.getChild(1).toStringTree());
-						String match = getNodeCat(attrNode).equals("EQ") ? "eq" : "ne";
-						term.put("match", "match:" + match);
-					} else {
-						LinkedHashMap<String, Object> subTermGroup = CqlfObjectGenerator.makeTermGroup("and");
-						ArrayList<Object> subTermGroupOperands = (ArrayList<Object>) subTermGroup.get("operands");
-						int j;
-						for (j = 1; j < attrNode.getChildCount(); j++) {
-							LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
-							String layer = attrNode.getChild(0).toStringTree();
-							String[] splitted = layer.split("/");
-							if (splitted.length > 1) {
-								term.put("foundry", splitted[0]);
-								layer = splitted[1];
-							}
-							term.put("layer", translateMorph(layer));
-							term.put("key", attrNode.getChild(j).toStringTree());
-							String match = getNodeCat(attrNode).equals("EQ") ? "eq" : "ne";
-							term.put("match", "match:" + match);
-							if (node.getChildCount() == elname + 1) {
-								termGroupOperands.add(term);
-							} else {
-								subTermGroupOperands.add(term);
-							}
-						}
-						if (node.getChildCount() > elname + 1) {
-							termGroupOperands.add(subTermGroup);
-						}
-					}
-					if (getNodeCat(attrNode).equals("NOTEQ")) negate = true;
-				}
-				// possibly only one term was present throughout all nodes: extract it from the group
-				if (termGroupOperands.size()==1) {
-					termGroup = (LinkedHashMap<String, Object>) termGroupOperands.get(0);
-				}
-				span.put("attr", termGroup);
-			}
-		}
-		//Step II: decide where to put
-		putIntoSuperObject(span);
-	}
+        // inclusion or overlap
+        if (nodeCat.equals("OPIN") || nodeCat.equals("OPOV")) {
+            processOPIN_OPOV(node);
+        }
 
-	private void processOPMORPH(Tree node) {
-		//Step I: get info
-		String[] morphterms = node.getChild(0).toStringTree().replace(" ", "").split("&");
-		LinkedHashMap<String, Object> token = CqlfObjectGenerator.makeToken();
-		ArrayList<Object> terms = new ArrayList<Object>();
-		LinkedHashMap<String, Object> fieldMap = null;
-		for (String morphterm : morphterms) {
-			// regex group #2 is foundry, #4 layer, #5 operator #6 key, #8 value
-			Pattern p = Pattern.compile("((\\w+)/)?((\\w*)(!?=))?(\\w+)(:(\\w+))?");    
-			Matcher m = p.matcher(morphterm);										  
-			if (! m.matches()) {
-				addError(StatusCodes.UNKNOWN_QUERY_ERROR, "Something went wrong parsing the argument in MORPH().");
-				requestMap.put("query", new LinkedHashMap<String, Object>());
-				return;
-			}
-			
-			fieldMap = new LinkedHashMap<String, Object>();
-			fieldMap.put("@type", "korap:term");
-			
-			if (m.group(2) != null) fieldMap.put("foundry", m.group(2));
-			if (m.group(4) != null) fieldMap.put("layer", m.group(4));
-			if (m.group(5) != null) {
-				if ("!=".equals(m.group(5))) negate = !negate; 
-			}
-			if (m.group(6) != null) fieldMap.put("key", m.group(6));
-			if (m.group(8) != null) fieldMap.put("value", m.group(8));
+        // Wrap the argument of an #IN operator in a previously
+        // defined container
+        if (nodeCat.equals("ARG1") || nodeCat.equals("ARG2")) {
+            processARG1_ARG2(node);
+        }
 
-			// negate field (see above)
-			if (negate) {
-				fieldMap.put("match", "match:ne");
-			} else {
-				fieldMap.put("match", "match:eq");
-			}
-			terms.add(fieldMap);
-		}
-		if (morphterms.length == 1) {
-			token.put("wrap", fieldMap);
-		} else {
-			LinkedHashMap<String, Object> termGroup = CqlfObjectGenerator.makeTermGroup("and");
-			termGroup.put("operands", terms);
-			token.put("wrap", termGroup);
-		}
-		//Step II: decide where to put
-		putIntoSuperObject(token, 0);
-		visited.add(node.getChild(0));
-	}
+        if (nodeCat.equals("OPALL")) {
+            inOPALL = true;
+        }
 
-	/**
-	 * Nodes introducing tokens. Process all in the same manner, except for the fieldMap entry
-	 * @param node
-	 */
-	private void processOPWF_OPLEM(Tree node) {
-		String nodeCat = getNodeCat(node);
-		//Step I: get info
-		LinkedHashMap<String, Object> token = new LinkedHashMap<String, Object>();
-		token.put("@type", "korap:token");
-		objectStack.push(token);
-		stackedObjects++;
-		LinkedHashMap<String, Object> fieldMap = new LinkedHashMap<String, Object>();
-		token.put("wrap", fieldMap);
+        if (nodeCat.equals("OPNHIT")) {
+            processOPNHIT(node);
+        }
 
-		fieldMap.put("@type", "korap:term");
-		// make category-specific fieldMap entry
-		String attr = nodeCat.equals("OPWF") ? "orth" : "lemma";
-		String value = node.getChild(0).toStringTree().replaceAll("\"", "");
-		// check for wildcard string
-		Pattern p = Pattern.compile("[+*?]");
-		Matcher m = p.matcher(value);
-		if (m.find()) fieldMap.put("type", "type:wildcard");
+        if (nodeCat.equals("OPEND") || nodeCat.equals("OPBEG")) {
+            processOPEND_OPBEG(node);
+        }
 
-		if (value.startsWith("$")) {
-			value = value.substring(1);
-			fieldMap.put("caseInsensitive", true);
-		}
+        if (nodeCat.equals("OPBED")) {
+            processOPBED(node);
+        }
+        objectsToPop.push(stackedObjects);
+        toWrapsToPop.push(stackedToWrap);
 
-		fieldMap.put("key", value);
-		fieldMap.put("layer", attr);
+        /*
+         * ***************************************************************
+         * ***************************************************************
+         * recursion until 'request' node (root of tree) is processed
+         * *
+         * ***********************************************************
+         * ****
+         * ********************************************************
+         * *******
+         */
+        for (int i = 0; i < node.getChildCount(); i++) {
+            Tree child = node.getChild(i);
+            processNode(child);
+        }
 
-		// negate field (see above)
-		if (negate) {
-			fieldMap.put("match", "match:ne");
-		} else {
-			fieldMap.put("match", "match:eq");
-		}
-		//Step II: decide where to put
-		if (!hasChild(node, "TPOS")) {
-			putIntoSuperObject(token, 1);
-			visited.add(node.getChild(0));
-		} else {
-			// TODO
-		}
-	}
+        /*
+         * *************************************************************
+         * Stuff that happens after processing the children of a node
+         * *
+         * ***********************************************************
+         * **
+         */
 
-	private void processSpanDistance(String meas, int min, int max) {
-		// Do stuff here in case we'll decide one day to treat span distances in a special way.
-		// (see GDoc Special Distances Serialization)
-	}
+        // remove sequence from object stack if node is implicitly
+        // sequenced
+        if (sequencedNodes.size() > 0) {
+            if (node == sequencedNodes.getFirst()) {
+                objectStack.pop();
+                sequencedNodes.pop();
+            }
+        }
 
-	/**
-	 * Registers an entry in the {@link #operandWrap} table in order to allow an operator's arguments
-	 * (or only one of them) to be wrapped in a class group.
-	 * @param node The operator node (parent node of the ARG1/ARG2 node)
-	 * @param arg The argument number (1 or 2)
-	 * @param cls The class id.
-	 */
-	private void wrapOperandInClass(Tree node, int arg, int cls) {
-		LinkedHashMap<String,Object> clsGroup = CqlfObjectGenerator.makeSpanClass(cls);
-		wrapOperand(node,arg,clsGroup);
-	}
+        for (int i = 0; i < objectsToPop.get(0); i++) {
+            objectStack.pop();
+        }
+        objectsToPop.pop();
 
-	/**
-	 * Registers an entry in the {@link #operandWrap} table in order to allow an operator's arguments
-	 * (or only one of them) to be wrapped in an arbitrary object, e.g. a reference group.
-	 * @param node The operator node (parent node of the ARG1/ARG2 node)
-	 * @param arg The argument number (1 or 2)
-	 * @param container The object in whose operand list the argument shall be wrapped.
-	 */
-	private void wrapOperand(Tree node, int arg, LinkedHashMap<String, Object> container) {
-		operandWrap.put(node, arg, container);
-	}
+        // if (!toWrapStack.isEmpty())
+        // System.err.println(toWrapStack.get(0)[0]);
+        for (int i = 0; i < toWrapsToPop.get(0); i++) {
+            putIntoSuperObject(wrap(toWrapStack.pop()));
+        }
+        toWrapsToPop.pop();
 
-	private Object translateMorph(String layer) {
-		// might be extended...
-		if (layer.equals("ANA"))
-			return ResourceMapper.descriptor2policy("ANA");
-		else
-			return layer;
+        if (nodeCat.equals("ARG2") && openNodeCats.get(1).equals("OPNOT")) {
+            negate = false;
+        }
 
-	}
+        if (nodeCat.equals("OPALL")) {
+            inOPALL = false;
+        }
 
-	@SuppressWarnings("unchecked")
-	/**
-	 * Processes individual position conditions as provided in the OPTS node under the OPBEG node.
-	 * #BEG allows to specify position constrains that apply to the beginning or the end of the subquery X.
-	 * E.g., in #BEG(X, tpos/tpos), the 'tpos' constraints before the slash indicate conditions that apply 
-	 * to the beginning of X, those after the slash are conditions that apply to the end of X.
-	 * See the official C-II documentation for more details. <br/><br/>
-	 * What's important here is what follows: <br/>
-	 * Assume the query #BED(der Mann, sa/pa). This means that <b>the beginning<b/> of "der Mann" stands at
-	 * the beginning of a sentence and that <b>the end</b> (because this constraint comes after the slash) stands at the 
-	 * beginning of a paragraph. The "end" means the last item, here "Mann", so this token comes at the beginning
-	 * of a paragraph. To capture this, we choose spanRefs: The last item of X matches the first item of the span (here: P). 
-	 * @param cond
-	 * @param distributedOperands
-	 * @param mode
-	 * @return
-	 */
-	private LinkedHashMap<String, Object> processPositionCondition(Tree cond, ArrayList<Object> distributedOperands, String mode) {
-		boolean negated = false;
-		String elem; // the 'span' (s/p/t)
-		String position = "frames:matches"; // default
-		Integer[] elemSpanRef = null; // spanRef to be used for the element ('span')
-		Integer[] hitSpanRef = null; // spanRef to be used for the subquery X 
+        openNodeCats.pop();
+    }
 
-		String nodeString = cond.toStringTree();
-		if (nodeString.startsWith("-")) {
-			negated = true;
-			nodeString = nodeString.substring(1);
-		} else if (nodeString.startsWith("+")) {
-			nodeString = nodeString.substring(1);
-		}
+    private void processOPEND_OPBEG(Tree node) {
+        // Step I: create group
+        String nodeCat = getNodeCat(node);
+        LinkedHashMap<String, Object> beggroup = new LinkedHashMap<String, Object>();
+        beggroup.put("@type", "korap:reference");
+        beggroup.put("operation", "operation:focus");
+        ArrayList<Integer> spanRef = new ArrayList<Integer>();
+        if (nodeCat.equals("OPBEG")) {
+            spanRef.add(0);
+            spanRef.add(1);
+        }
+        else if (nodeCat.equals("OPEND")) {
+            spanRef.add(-1);
+            spanRef.add(1);
+        }
+        beggroup.put("spanRef", spanRef);
+        beggroup.put("operands", new ArrayList<Object>());
+        objectStack.push(beggroup);
+        stackedObjects++;
 
-		elem = nodeString.substring(0, 1);
-		nodeString = nodeString.substring(1);
+        // Step II: decide where to put
+        putIntoSuperObject(beggroup, 1);
+    }
 
-		// in cases where the end of X shall match the beginning of the span, or vice versa, 
-		// we need to define spanRefs
-		if (mode.equals("beg")) {
-			if (nodeString.equals("a")) {
-				position = "frames:startswith";
-			} else if (nodeString.equals("e")) {
-				hitSpanRef = new Integer[]{0,1};
-				elemSpanRef = new Integer[]{-1,1};
-			}
-		} else if (mode.equals("end")) {
-			if (nodeString.equals("e")) {
-				position = "frames:endswith";
-			} else if (nodeString.equals("a")) {
-				hitSpanRef = new Integer[]{0,1};
-				elemSpanRef = new Integer[]{-1,1};
-			}
-		}
-		// Create the position group and add the span and the subquery as operands, possibly wrapped in spanRefs
-		LinkedHashMap<String, Object> positionGroup = CqlfObjectGenerator.makePosition(new String[]{position}, new String[0]);
-		if (negated) positionGroup.put("exclude", true);
-		ArrayList<Object> posOperands = new ArrayList<Object>();
-		LinkedHashMap<String, Object> classGroup = CqlfObjectGenerator.makeSpanClass(classCounter++);
-		classGroup.put("operands", distributedOperands);
-		positionGroup.put("operands", posOperands);
-		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
-		span.put("@type", "korap:span");
-		span.put("key", elem);
-		objectStack.push(classGroup);
-		if (hitSpanRef != null) {
-			LinkedHashMap<String, Object> spanRefAroundHit = CqlfObjectGenerator.makeSpanReference(hitSpanRef, "focus");
-			((ArrayList<Object>) spanRefAroundHit.get("operands")).add(classGroup);
-			classGroup = spanRefAroundHit; //re-assign after wrapping classGroup in spanRef
-		}
-		if (elemSpanRef != null) {
-			LinkedHashMap<String, Object> spanRefAroundSpan = CqlfObjectGenerator.makeSpanReference(elemSpanRef, "focus");
-			((ArrayList<Object>) spanRefAroundSpan.get("operands")).add(span);
-			span = spanRefAroundSpan; //re-assign after wrapping span in spanRef
-		}
-		posOperands.add(span);
-		posOperands.add(classGroup);
-		return positionGroup;
-	}
+    private void processOPBED(Tree node) {
+        // Node structure is (OPBED X+ (OPTS (TPBEG tpos*) (TPEND
+        // tpos*)))
+        // X is some segment, TPBEG or TPEND must be present
+        // (inclusive OR)
+        // tpos is a three-char string of the form "[+-]?[spt][ae]".
+        // s/p/t indicates span, a/e beginning/end, - means negation
+        // See C-II QL documentation for more detail:
+        // http://www.ids-mannheim.de/cosmas2/win-app/hilfe/suchanfrage/eingabe-grafisch/syntax/textpositionen.html
 
-	private LinkedHashMap<String, Object> parseOPINOptions(Tree node) {
-		Tree posnode = getFirstChildWithCat(node, "POS");
-		Tree rangenode = getFirstChildWithCat(node, "RANGE");
-		Tree exclnode = getFirstChildWithCat(node, "EXCL");
-		Tree groupnode = getFirstChildWithCat(node, "GROUP");
-		boolean negatePosition = false;
-		LinkedHashMap<String, Object> posOptions = new LinkedHashMap<String, Object>();
-		ArrayList<String> positions = new ArrayList<String>();
-		ArrayList<String> classRefCheck = new ArrayList<String>();
-		posOptions.put("matchall", false);
-		String frame = "";
-		String posOption = null; 
-		if (posnode != null) {
-			posOption = posnode.getChild(0).toStringTree();
-			switch (posOption) {
-			case "L":
-				positions.add("frames:startswith");
-				classRefCheck.add("classRefCheck:includes");
-				frame = "startswith";
-				break;
-			case "R":
-				positions.add("frames:endswith");
-				classRefCheck.add("classRefCheck:includes");
-				frame = "endswith";
-				break;
-			case "F":
-				positions.add("frames:matches");
-				classRefCheck.add("classRefCheck:includes");
-				frame = "matches";
-				break;
-			case "FE":
-				positions.add("frames:matches");
-				classRefCheck.add("classRefCheck:equals");
-				frame = "matches";
-				break;
-			case "FI":
-				positions.add("frames:matches");
-				classRefCheck.add("classRefCheck:unequals");
-				classRefCheck.add("classRefCheck:includes");
-				frame = "matches-noident";
-				break;
-			case "N": 
-				positions.add("frames:contains");
-				classRefCheck.add("classRefCheck:includes");
-				frame = "contains";
-				break;
-			}
-		} else {
-			classRefCheck.add("classRefCheck:includes");
-			frame = "contains";
-		}
-		posOptions.put("frames", positions);
-		posOptions.put("classRefCheck", classRefCheck);
-		posOptions.put("frame", "frame:"+frame);
-		addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-09-22: 'frame' only to be supported until 3 months from deprecation date. " +
-				"Position frames are now expressed through 'frames' and 'sharedClasses'");
+        // Step I: create group
+        int optsChild = node.getChildCount() - 1;
+        Tree begConditions = getFirstChildWithCat(node.getChild(optsChild),
+                "TPBEG");
+        Tree endConditions = getFirstChildWithCat(node.getChild(optsChild),
+                "TPEND");
 
-		if (exclnode != null) {
-			if (exclnode.getChild(0).toStringTree().equals("YES")) {
-				negatePosition = !negatePosition;
-			}
-		}
+        LinkedHashMap<String, Object> submatchgroup = 
+                CqlfObjectGenerator.makeReference(128 + classCounter);
+        ArrayList<Object> submatchOperands = new ArrayList<Object>();
+        submatchgroup.put("operands", submatchOperands);
+        putIntoSuperObject(submatchgroup);
 
-		if (rangenode != null) {
-			String range = rangenode.getChild(0).toStringTree().toLowerCase();
-			if (range.equals("all")) {
-				posOptions.put("matchall", true);
-				//            	LinkedHashMap<String,Object> ref = makeResetReference(); // reset all defined classes
-				//            	wrapOperand(node,2,ref);
-			}
-		}
+        // Step II: collect all conditions, create groups for them in
+        // processPositionCondition()
+        ArrayList<Object> distributedOperands = new ArrayList<Object>();
+        ArrayList<LinkedHashMap<String, Object>> conditionGroups = 
+                new ArrayList<LinkedHashMap<String, Object>>();
+        if (begConditions != null) {
+            for (Tree condition : getChildren(begConditions)) {
+                conditionGroups.add(processPositionCondition(condition,
+                        distributedOperands, "beg"));
+            }
+        }
+        if (endConditions != null) {
+            for (Tree condition : getChildren(endConditions)) {
+                conditionGroups.add(processPositionCondition(condition,
+                        distributedOperands, "end"));
+            }
+        }
+        // Step III: insert the conditions. Matches-groups need to be
+        // stacked because position groups may only have two operands.
+        // currentLowestOperands indicates where to insert the next
+        // condition group.
+        ArrayList<Object> currentLowestOperands = submatchOperands;
+        int conditionCount = 0;
+        for (LinkedHashMap<String, Object> conditionGroup : conditionGroups) {
+            conditionCount++;
+            if (conditionGroups.size() == 1) {
+                submatchOperands.add(conditionGroup);
+            }
+            else if (conditionCount < conditionGroups.size()) {
+                LinkedHashMap<String, Object> matchesGroup = CqlfObjectGenerator
+                        .makePosition(new String[] { "frames:matches" },
+                                new String[0]);
+                @SuppressWarnings("unchecked")
+                ArrayList<Object> matchesOperands = 
+                    (ArrayList<Object>) matchesGroup.get("operands");
+                matchesOperands.add(conditionGroup);
+                // matches-groups embedded at the second or a lower level
+                // receive an additional focus that extracts only the query
+                // term to which the constraint applies
+                if (conditionCount > 1) {
+                    LinkedHashMap<String, Object> focus = 
+                            CqlfObjectGenerator.makeReference(128 + classCounter - 2);
+                    ArrayList<Object> focusOperands = new ArrayList<Object>();
+                    focus.put("operands", focusOperands);
+                    focusOperands.add(matchesGroup);
+                    currentLowestOperands.add(focus);
+                }
+                else {
+                    currentLowestOperands.add(matchesGroup);
+                }
+                currentLowestOperands = matchesOperands;
+            }
+            else {
+                currentLowestOperands.add(conditionGroup);
+            }
+        }
+    }
 
-		if (negatePosition) {
-			posOptions.put("exclude", "true");
-		}
+    private void processOPNHIT(Tree node) {
+        Integer[] classRef = new Integer[] { 128 + classCounter + 1,
+                128 + classCounter + 2 };
+        // classRef.add(classCounter + 1); // yes, do this twice (two classes)!
+        LinkedHashMap<String, Object> group = 
+                CqlfObjectGenerator.makeReference(128 + classCounter);
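+        // classRefOp:inversion over the two classes that are assigned to
+        // the operands of the embedded OPPROX below (via wrapOperandInClass)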
+        LinkedHashMap<String, Object> classRefCheck = 
+                CqlfObjectGenerator.makeClassRefOp("classRefOp:inversion", classRef,
+                        classCounter + 128);
+        ArrayList<Object> operands = new ArrayList<Object>();
+        operands.add(classRefCheck);
+        group.put("operands", operands);
+        classCounter++;
+        // direct child is OPPROX
+        wrapOperandInClass(node.getChild(0), 1, classCounter++); 
+        wrapOperandInClass(node.getChild(0), 2, classCounter++);
+        objectStack.push(classRefCheck);
+        stackedObjects++;
+        putIntoSuperObject(group, 1);
+    }
 
-		if (groupnode != null) {
-			String grouping = groupnode.getChild(0).toStringTree().equals("max") ? "true" : "false";
-			posOptions.put("grouping", grouping);
-		}
-		return posOptions;
-	}
+    private void processARG1_ARG2(Tree node) {
+        String nodeCat = getNodeCat(node);
+        Tree parent = node.getParent();
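+        // check whether the parent operator registered a wrap container
+        // for this argument (via wrapOperand()/wrapOperandInClass())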
+        if (operandWrap.containsRow(parent)) {
+            // Step I: create group
+            int argNr = nodeCat.equals("ARG1") ? 1 : 2;
+            LinkedHashMap<String, Object> container = operandWrap.row(parent)
+                    .get(argNr);
+            // Step II: ingest
+            if (container != null) {
+                objectStack.push(container);
+                stackedObjects++;
+                putIntoSuperObject(container, 1);
+            }
+        }
+    }
 
+    @SuppressWarnings("unchecked")
+    private void processOPIN_OPOV(Tree node) {
+        // Step I: create group
+        String nodeCat = getNodeCat(node);
+        wrapOperandInClass(node, 2, classCounter++);
+        wrapOperandInClass(node, 1, classCounter++);
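+        // both operands receive a class so that the classRefCheck created
+        // below can compare their contents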
+        // LinkedHashMap<String, Object> posgroup = makePosition(null);
+        LinkedHashMap<String, Object> posgroup = CqlfObjectGenerator
+                .makeGroup("position");
+        LinkedHashMap<String, Object> positionOptions;
+        // posgroup
+        if (nodeCat.equals("OPIN")) {
+            positionOptions = parseOPINOptions(node);
+        }
+        else {
+            positionOptions = parseOPOVOptions(node);
+        }
+        posgroup.put("frames", positionOptions.get("frames"));
+        posgroup.put("frame", positionOptions.get("frame"));
+        if (positionOptions.containsKey("exclude")) {
+            posgroup.put("exclude", positionOptions.get("exclude"));
+        }
+        if (positionOptions.containsKey("grouping")) {
+            posgroup.put("grouping", positionOptions.get("grouping"));
+        }
+        objectStack.push(posgroup);
+        // mark this an inverted operands object
+        invertedOperandsLists.push((ArrayList<Object>) posgroup.get("operands"));
+        stackedObjects++;
+        // Step II: wrap in reference and decide where to put
+        ArrayList<String> check = 
+                (ArrayList<String>) positionOptions.get("classRefCheck");
+        Integer[] classIn = 
+                new Integer[] { 128 + classCounter - 2, 128 + classCounter - 1 };
+        LinkedHashMap<String, Object> classRefCheck = 
+                CqlfObjectGenerator.makeClassRefCheck(check, classIn, 128 + classCounter);
+        ((ArrayList<Object>) classRefCheck.get("operands")).add(posgroup);
+        LinkedHashMap<String, Object> focusGroup = null;
+        if ((boolean) positionOptions.get("matchall")) {
+            focusGroup = CqlfObjectGenerator.makeResetReference();
+            ((ArrayList<Object>) focusGroup.get("operands")).add(classRefCheck);
+        }
+        else { // match only first argument
+            focusGroup = CqlfObjectGenerator.wrapInReference(classRefCheck,
+                    128 + classCounter - 1);
+        }
+        putIntoSuperObject(focusGroup, 1);
+    }
 
+    @SuppressWarnings("unchecked")
+    private void processOPPROX(Tree node) {
+        // collect info
+        Tree prox_opts = node.getChild(0);
+        Tree typ = prox_opts.getChild(0);
+        Tree dist_list = prox_opts.getChild(1);
+        // Step I: create group
+        LinkedHashMap<String, Object> group = CqlfObjectGenerator
+                .makeGroup("sequence");
 
-	private LinkedHashMap<String, Object> parseOPOVOptions(Tree node) {
-		boolean negatePosition = false;
-		Tree posnode = getFirstChildWithCat(node, "POS");
-		Tree rangenode = getFirstChildWithCat(node, "RANGE");
-		Tree exclnode = getFirstChildWithCat(node, "EXCL");
-		Tree groupnode = getFirstChildWithCat(node, "GROUP");
-		LinkedHashMap<String, Object> posOptions = new LinkedHashMap<String, Object>();
-		ArrayList<String> positions = new ArrayList<String>();
-		ArrayList<String> classRefCheck = new ArrayList<String>();
-		posOptions.put("matchall", false);
-		String frame = "";
-		String posOption = null; 
-		if (posnode != null) {
-			posOption = posnode.getChild(0).toStringTree();
-			switch (posOption) {
-			case "L":
-				positions.add("frames:startswith");
-				positions.add("frames:overlapsLeft");
-				classRefCheck.add("classRefCheck:intersects");
-				frame = "overlapsLeft";
-				break;
-			case "R":
-				positions.add("frames:endswith");
-				positions.add("frames:overlapsRight");
-				classRefCheck.add("classRefCheck:intersects");
-				frame = "overlapsRight";
-				break;
-			case "F":
-				positions.add("frames:matches");
-				classRefCheck.add("classRefCheck:intersects");
-				frame = "matches";
-				break;
-			case "FE":
-				positions.add("frames:matches");
-				classRefCheck.add("classRefCheck:equals");
-				frame = "matches";
-				break;
-			case "FI":
-				positions.add("frames:matches");
-				classRefCheck.add("classRefCheck:unequals");
-				frame = "matches-noident";
-				break;
-			case "X": 
-				positions.add("frames:contains");
-				classRefCheck.add("classRefCheck:intersects");
-				frame = "overlaps";
-				break;
-			}
-		} else {
-			classRefCheck.add("classRefCheck:intersects");
-			frame = "overlaps";
-		}
+        ArrayList<Object> constraints = new ArrayList<Object>();
+        boolean exclusion = typ.getChild(0).toStringTree().equals("EXCL");
 
-		posOptions.put("frames", positions);
-		posOptions.put("classRefCheck", classRefCheck);
-		posOptions.put("frame", "frame:"+frame);
-		addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-09-22: 'frame' only to be supported until 3 months from deprecation date. " +
-				"Position frames are now expressed through 'frames' and 'sharedClasses'");
+        boolean inOrder = false;
+        boolean invertedOperands = false;
 
-		if (exclnode != null) {
-			if (exclnode.getChild(0).toStringTree().equals("YES")) {
-				negatePosition = !negatePosition;
-			}
-		}
+        group.put("inOrder", inOrder);
+        group.put("distances", constraints);
 
-		if (rangenode != null) {
-			String range = rangenode.getChild(0).toStringTree().toLowerCase();
-			if (range.equals("all")) {
-				posOptions.put("matchall", true);
-				//            	LinkedHashMap<String,Object> ref = makeResetReference(); // reset all defined classes
-				//            	wrapOperand(node,2,ref);
-			}
-		}
+        boolean putIntoOverlapDisjunction = false;
 
-		if (negatePosition) {
-			posOptions.put("exclude", "true");
-		}
+        int min = 0, max = 0;
+        // possibly several distance constraints
+        for (int i = 0; i < dist_list.getChildCount(); i++) {
+            String direction = dist_list.getChild(i).getChild(0).getChild(0)
+                    .toStringTree().toLowerCase();
+            String minStr = dist_list.getChild(i).getChild(1).getChild(0)
+                    .toStringTree();
+            String maxStr = dist_list.getChild(i).getChild(1).getChild(1)
+                    .toStringTree();
+            String meas = dist_list.getChild(i).getChild(2).getChild(0)
+                    .toStringTree();
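+            // measure unit, e.g. 'w' for word distances; non-word units
+            // denote span distances (cf. processSpanDistance())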
+            if (minStr.equals("VAL0")) {
+                minStr = "0";
+            }
+            min = Integer.parseInt(minStr);
+            max = Integer.parseInt(maxStr);
+            // If zero word-distance, wrap this sequence in a
+            // disjunction along with an overlap position
+            // between the two operands
+            /*
+             * XXX: This is currently deactivated. Uncomment to
+             * activate treatment of zero-word distances as
+             * overlap-alternatives (see google doc on special
+             * distances serialization)
+             * 
+             * if (meas.equals("w") && min == 0) { min = 1;
+             * putIntoOverlapDisjunction = true; }
+             */
+            if (!meas.equals("w") && min == 0) {
+                processSpanDistance(meas, min, max);
+            }
+            LinkedHashMap<String, Object> distance = CqlfObjectGenerator
+                    .makeDistance(meas, min, max);
+            if (exclusion) {
+                distance.put("exclude", true);
+            }
+            // if (! openNodeCats.get(1).equals("OPNHIT")) {
+            constraints.add(distance);
+            // }
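+            // only the first distance constraint determines the
+            // operand order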
+            if (i == 0) {
+                if (direction.equals("plus")) {
+                    inOrder = true;
+                }
+                else if (direction.equals("minus")) {
+                    inOrder = true;
+                    invertedOperands = true;
+                }
+                else if (direction.equals("both")) {
+                    inOrder = false;
+                }
+            }
+        }
+        group.put("inOrder", inOrder);
+        LinkedHashMap<String, Object> embeddedSequence = group;
 
-		if (groupnode != null) {
-			String grouping = groupnode.getChild(0).toStringTree().equals("max") ? "true" : "false";
-			posOptions.put("grouping", grouping);
-		}
-		return posOptions;
-	}
+        if (!(openNodeCats.get(1).equals("OPBEG")
+                || openNodeCats.get(1).equals("OPEND") 
+                || inOPALL 
+                || openNodeCats.get(1).equals("OPNHIT"))) {
+            wrapOperandInClass(node, 1, classCounter);
+            wrapOperandInClass(node, 2, classCounter);
+            group = CqlfObjectGenerator.wrapInReference(group,
+                    128 + classCounter++);
+        }
+        else if (openNodeCats.get(1).equals("OPNHIT")) {
+            LinkedHashMap<String, Object> repetition = CqlfObjectGenerator
+                    .makeRepetition(min, max);
+            ((ArrayList<Object>) repetition.get("operands"))
+                    .add(CqlfObjectGenerator.makeToken());
+            // TODO go on with this: put the repetition into a class
+            // and put it in between the operands
+            // -> what if there's several distance constraints. with
+            // different keys, like /w4,s0?
+        }
 
-	@SuppressWarnings({ "unchecked" })
-	private LinkedHashMap<String,Object> wrap(LinkedHashMap[] wrapCascade) {
-		int i;
-		for (i=0; i<wrapCascade.length-1; i++) {
-			ArrayList<Object> containerOperands = (ArrayList<Object>) wrapCascade[i+1].get("operands");
-			containerOperands.add(0,wrapCascade[i]);
-		}
-		return wrapCascade[i];
-	}
+        LinkedHashMap<String, Object> sequence = null;
+        if (putIntoOverlapDisjunction) {
+            sequence = embeddedSequence;
+            group = CqlfObjectGenerator.makeGroup("or");
+            ArrayList<Object> disjOperands = 
+                    (ArrayList<Object>) group.get("operands");
+            String[] sharedClasses = new String[] { "intersects" };
+            LinkedHashMap<String, Object> overlapsGroup = CqlfObjectGenerator
+                    .makePosition(new String[0], sharedClasses);
 
-	LinkedList<ArrayList<Object>> nestedDistOperands = new LinkedList<ArrayList<Object>>();  
+            ArrayList<Object> overlapsOperands = 
+                    (ArrayList<Object>) overlapsGroup.get("operands");
+            // both objects share the same operands list, so operands added
+            // later are distributed to both created objects
+            sequence.put("operands", overlapsOperands);
+            if (invertedOperands) {
+                invertedOperandsLists.push(overlapsOperands);
+            }
+            disjOperands.add(overlapsGroup);
+            disjOperands.add(CqlfObjectGenerator.wrapInReference(sequence, 0));
+            // Step II: decide where to put
+            putIntoSuperObject(group, 0);
+            objectStack.push(sequence);
+        }
+        else {
+            if (invertedOperands) {
+                ArrayList<Object> operands = 
+                        (ArrayList<Object>) embeddedSequence.get("operands");
+                invertedOperandsLists.push(operands);
+            }
+            // Step II: decide where to put
+            putIntoSuperObject(group, 0);
+            objectStack.push(embeddedSequence);
+        }
+        stackedObjects++;
+        visited.add(node.getChild(0));
+    }
 
-	@SuppressWarnings("unchecked")
-	private void putIntoSuperObject(LinkedHashMap<String, Object> object, int objStackPosition) {
-		if (objectStack.size() > objStackPosition) {
-			ArrayList<Object> topObjectOperands = (ArrayList<Object>) objectStack.get(objStackPosition).get("operands");
-			if (!invertedOperandsLists.contains(topObjectOperands)) {
-				topObjectOperands.add(object);
-			} else {
-				topObjectOperands.add(0, object);
-			}
+    private void processOPOR(Tree node) {
+        // Step I: create group
+        LinkedHashMap<String, Object> disjunction = new LinkedHashMap<String, Object>();
+        disjunction.put("@type", "korap:group");
+        disjunction.put("operation", "operation:or");
+        disjunction.put("operands", new ArrayList<Object>());
+        objectStack.push(disjunction);
+        stackedObjects++;
+        // Step II: decide where to put
+        putIntoSuperObject(disjunction, 1);
+    }
 
-		} else {
-			requestMap.put("query", object);
-		}
-	}
+    private void processOPAND_OPNOT(Tree node) {
+        // Step I: create group
+        String nodeCat = getNodeCat(node);
+        LinkedHashMap<String, Object> distgroup = new LinkedHashMap<String, Object>();
+        distgroup.put("@type", "korap:group");
+        distgroup.put("operation", "operation:sequence");
+        ArrayList<Object> distances = new ArrayList<Object>();
+        LinkedHashMap<String, Object> zerodistance = new LinkedHashMap<String, Object>();
+        zerodistance.put("@type", "cosmas:distance");
+        zerodistance.put("key", "t");
+        zerodistance.put("min", 0);
+        zerodistance.put("max", 0);
+        if (nodeCat.equals("OPNOT"))
+            zerodistance.put("exclude", true);
+        distances.add(zerodistance);
+        distgroup.put("distances", distances);
+        distgroup.put("operands", new ArrayList<Object>());
+        objectStack.push(distgroup);
+        stackedObjects++;
+        // Step II: decide where to put
+        putIntoSuperObject(distgroup, 1);
+    }
 
-	private void putIntoSuperObject(LinkedHashMap<String, Object> object) {
-		putIntoSuperObject(object, 0);
-	}
+    private void processOPLABEL(Tree node) {
+        // Step I: create element
+        LinkedHashMap<String, Object> elem = new LinkedHashMap<String, Object>();
+        elem.put("@type", "korap:span");
+        elem.put("key", node.getChild(0).toStringTree().replaceAll("<|>", ""));
+        // Step II: decide where to put
+        putIntoSuperObject(elem);
+    }
 
-	/**
-	 * Normalises position operators to equivalents using #BED  
-	 */
-	private String rewritePositionQuery(String q) {
-		Pattern p = Pattern.compile("(\\w+):((\\+|-)?(sa|se|pa|pe|ta|te),?)+");
-		Matcher m = p.matcher(q);
+    @SuppressWarnings("unchecked")
+    private void processOPELEM(Tree node) {
+        // Step I: create element
+        LinkedHashMap<String, Object> span = CqlfObjectGenerator.makeSpan();
+        if (node.getChild(0).toStringTree().equals("EMPTY")) {
 
-		String rewrittenQuery = q;
-		while (m.find()) {
-			String match = m.group();
-			String conditionsString = match.split(":")[1];
-			Pattern conditionPattern = Pattern.compile("(\\+|-)?(sa|se|pa|pe|ta|te)");
-			Matcher conditionMatcher = conditionPattern.matcher(conditionsString);
-			String replacement = "#BED(" + m.group(1) + " , ";
-			while (conditionMatcher.find()) {
-				replacement = replacement + conditionMatcher.group() + ",";
-			}
-			replacement = replacement.substring(0, replacement.length() - 1) + ")"; //remove trailing comma and close parenthesis
-			rewrittenQuery = rewrittenQuery.replace(match, replacement);
-		}
-		return rewrittenQuery;
-	}
+        }
+        else {
+            int elname = 0;
+            Tree elnameNode = getFirstChildWithCat(node, "ELNAME");
+            if (elnameNode != null) {
+                span.put("key", elnameNode.getChild(0).toStringTree()
+                        .toLowerCase());
+                elname = 1;
+            }
+            if (node.getChildCount() > elname) {
+                /*
+                 * Attributes can carry several values, like #ELEM(W
+                 * ANA != 'N V'), denoting a word whose POS is neither
+                 * N nor V. When seeing this, create a sub-termGroup
+                 * and put it into the top-level term group, but only
+                 * if there are other attributes in that group. If
+                 * not, put the individual values as distinct
+                 * attribute-value pairs into the top-level group (in
+                 * order to avoid a top-level group that only contains
+                 * a sub-group).
+                 */
+                LinkedHashMap<String, Object> termGroup = 
+                        CqlfObjectGenerator.makeTermGroup("and");
+                ArrayList<Object> termGroupOperands = 
+                        (ArrayList<Object>) termGroup.get("operands");
+                for (int i = elname; i < node.getChildCount(); i++) {
+                    Tree attrNode = node.getChild(i);
+                    if (attrNode.getChildCount() == 2) {
+                        LinkedHashMap<String, Object> term = 
+                                CqlfObjectGenerator.makeTerm();
+                        termGroupOperands.add(term);
+                        String layer = attrNode.getChild(0).toStringTree();
+                        String[] splitted = layer.split("/");
+                        if (splitted.length > 1) {
+                            term.put("foundry", splitted[0]);
+                            layer = splitted[1];
+                        }
+                        term.put("layer", translateMorph(layer));
+                        term.put("key", attrNode.getChild(1).toStringTree());
+                        String match = 
+                                getNodeCat(attrNode).equals("EQ") ? "eq" : "ne";
+                        term.put("match", "match:" + match);
+                    }
+                    else {
+                        LinkedHashMap<String, Object> subTermGroup = CqlfObjectGenerator
+                                .makeTermGroup("and");
+                        ArrayList<Object> subTermGroupOperands = 
+                                (ArrayList<Object>) subTermGroup.get("operands");
+                        int j;
+                        for (j = 1; j < attrNode.getChildCount(); j++) {
+                            LinkedHashMap<String, Object> term = 
+                                    CqlfObjectGenerator.makeTerm();
+                            String layer = attrNode.getChild(0).toStringTree();
+                            String[] splitted = layer.split("/");
+                            if (splitted.length > 1) {
+                                term.put("foundry", splitted[0]);
+                                layer = splitted[1];
+                            }
+                            term.put("layer", translateMorph(layer));
+                            term.put("key", attrNode.getChild(j).toStringTree());
+                            String match = 
+                                    getNodeCat(attrNode).equals("EQ") ? "eq" : "ne";
+                            term.put("match", "match:" + match);
+                            if (node.getChildCount() == elname + 1) {
+                                termGroupOperands.add(term);
+                            }
+                            else {
+                                subTermGroupOperands.add(term);
+                            }
+                        }
+                        if (node.getChildCount() > elname + 1) {
+                            termGroupOperands.add(subTermGroup);
+                        }
+                    }
+                    if (getNodeCat(attrNode).equals("NOTEQ"))
+                        negate = true;
+                }
+                // possibly only one term was present throughout all
+                // nodes: extract it from the group
+                if (termGroupOperands.size() == 1) {
+                    termGroup = (LinkedHashMap<String, Object>) 
+                            termGroupOperands.get(0);
+                }
+                span.put("attr", termGroup);
+            }
+        }
+        // Step II: decide where to put
+        putIntoSuperObject(span);
+    }
 
-	private Tree parseCosmasQuery(String query) {
-		query = rewritePositionQuery(query);
-		Tree tree = null;
-		Antlr3DescriptiveErrorListener errorListener = new Antlr3DescriptiveErrorListener(query);
-		try {
-			ANTLRStringStream ss = new ANTLRStringStream(query);
-			c2psLexer lex = new c2psLexer(ss);
-			org.antlr.runtime.CommonTokenStream tokens = new org.antlr.runtime.CommonTokenStream(lex);  //v3
-			parser = new c2psParser(tokens);
-	
-			lex.setErrorReporter(errorListener); // Use the custom error reporter
-			((c2psParser) parser).setErrorReporter(errorListener); // Use the custom error reporter
-			c2psParser.c2ps_query_return c2Return = ((c2psParser) parser).c2ps_query();  // statt t().
-			// AST Tree anzeigen:
-			tree = (Tree) c2Return.getTree();
+    private void processOPMORPH(Tree node) {
+        // Step I: get info
+        String[] morphterms = 
+                node.getChild(0).toStringTree().replace(" ", "").split("&");
+        LinkedHashMap<String, Object> token = CqlfObjectGenerator.makeToken();
+        ArrayList<Object> terms = new ArrayList<Object>();
+        LinkedHashMap<String, Object> fieldMap = null;
+        for (String morphterm : morphterms) {
+            // regex group #2 is the foundry, #4 the layer, #5 the
+            // operator, #6 the key, #8 the value
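+            // e.g. a (hypothetical) morphterm "mpt/m=temp:pres" yields
+            // foundry "mpt", layer "m", key "temp", value "pres"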
+            Pattern p = Pattern
+                    .compile("((\\w+)/)?((\\w*)(!?=))?(\\w+)(:(\\w+))?");
+            Matcher m = p.matcher(morphterm);
+            if (!m.matches()) {
+                addError(StatusCodes.UNKNOWN_QUERY_ERROR,
+                        "Something went wrong parsing the argument in MORPH().");
+                requestMap.put("query", new LinkedHashMap<String, Object>());
+                return;
+            }
 
-		} catch (RecognitionException e) {
-			log.error("Could not parse query. Please make sure it is well-formed.");
-			addError(StatusCodes.MALFORMED_QUERY, "Could not parse query. Please make sure it is well-formed.");
-		}
-		String treestring = tree.toStringTree();
-		
-		boolean erroneous = false;
-		if (parser.failed() || parser.getNumberOfSyntaxErrors() > 0) {
-			erroneous = true;
-			tree = null;
-		}
+            fieldMap = new LinkedHashMap<String, Object>();
+            fieldMap.put("@type", "korap:term");
 
-		if (erroneous || treestring.contains("<mismatched token") || 
-				treestring.contains("<error") || treestring.contains("<unexpected")) {
-			log.error(errorListener.generateFullErrorMsg().toString());
-			addError(errorListener.generateFullErrorMsg());
-		}
-		return tree;
-	}
+            if (m.group(2) != null)
+                fieldMap.put("foundry", m.group(2));
+            if (m.group(4) != null)
+                fieldMap.put("layer", m.group(4));
+            if (m.group(5) != null) {
+                if ("!=".equals(m.group(5)))
+                    negate = !negate;
+            }
+            if (m.group(6) != null)
+                fieldMap.put("key", m.group(6));
+            if (m.group(8) != null)
+                fieldMap.put("value", m.group(8));
+
+            // negate field (see above)
+            if (negate) {
+                fieldMap.put("match", "match:ne");
+            }
+            else {
+                fieldMap.put("match", "match:eq");
+            }
+            terms.add(fieldMap);
+        }
+        if (morphterms.length == 1) {
+            token.put("wrap", fieldMap);
+        }
+        else {
+            LinkedHashMap<String, Object> termGroup = 
+                    CqlfObjectGenerator.makeTermGroup("and");
+            termGroup.put("operands", terms);
+            token.put("wrap", termGroup);
+        }
+        // Step II: decide where to put
+        putIntoSuperObject(token, 0);
+        visited.add(node.getChild(0));
+    }
+
+    /**
+     * Processes nodes that introduce tokens (OPWF, OPLEM). All are
+     * handled in the same manner, except for the category-specific
+     * fieldMap entry.
+     * 
+     * @param node
+     *            The OPWF or OPLEM node.
+     */
+    private void processOPWF_OPLEM(Tree node) {
+        String nodeCat = getNodeCat(node);
+        // Step I: get info
+        LinkedHashMap<String, Object> token = 
+                new LinkedHashMap<String, Object>();
+        token.put("@type", "korap:token");
+        objectStack.push(token);
+        stackedObjects++;
+        LinkedHashMap<String, Object> fieldMap = 
+                new LinkedHashMap<String, Object>();
+        token.put("wrap", fieldMap);
+
+        fieldMap.put("@type", "korap:term");
+        // make category-specific fieldMap entry
+        String attr = nodeCat.equals("OPWF") ? "orth" : "lemma";
+        String value = node.getChild(0).toStringTree().replaceAll("\"", "");
+        // check for wildcard string
+        Pattern p = Pattern.compile("[+*?]");
+        Matcher m = p.matcher(value);
+        if (m.find())
+            fieldMap.put("type", "type:wildcard");
+
+        if (value.startsWith("$")) {
+            value = value.substring(1);
+            fieldMap.put("caseInsensitive", true);
+        }
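+        // e.g. a word form given as "$haus" is thus serialized
+        // case-insensitively with key "haus"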
+
+        fieldMap.put("key", value);
+        fieldMap.put("layer", attr);
+
+        // negate field (see above)
+        if (negate) {
+            fieldMap.put("match", "match:ne");
+        }
+        else {
+            fieldMap.put("match", "match:eq");
+        }
+        // Step II: decide where to put
+        if (!hasChild(node, "TPOS")) {
+            putIntoSuperObject(token, 1);
+            visited.add(node.getChild(0));
+        }
+        else {
+            // TODO
+        }
+    }
+
+    private void processSpanDistance(String meas, int min, int max) {
+        // Placeholder in case span distances shall receive special
+        // treatment at some point (see GDoc Special Distances
+        // Serialization).
+    }
+
+    /**
+     * Registers an entry in the {@link #operandWrap} table in order
+     * to allow an operator's arguments (or only one of them) to be
+     * wrapped in a class group.
+     * 
+     * @param node
+     *            The operator node (parent node of the ARG1/ARG2
+     *            node)
+     * @param arg
+     *            The argument number (1 or 2)
+     * @param cls
+     *            The class id.
+     */
+    private void wrapOperandInClass(Tree node, int arg, int cls) {
+        LinkedHashMap<String, Object> clsGroup = 
+                CqlfObjectGenerator.makeSpanClass(cls);
+        wrapOperand(node, arg, clsGroup);
+    }
+
+    /**
+     * Registers an entry in the {@link #operandWrap} table in order
+     * to allow an operator's arguments (or only one of them) to be
+     * wrapped in an arbitrary object, e.g. a reference group.
+     * 
+     * @param node
+     *            The operator node (parent node of the ARG1/ARG2
+     *            node)
+     * @param arg
+     *            The argument number (1 or 2)
+     * @param container
+     *            The object in whose operand list the argument shall
+     *            be wrapped.
+     */
+    private void wrapOperand(Tree node, int arg,
+            LinkedHashMap<String, Object> container) {
+        operandWrap.put(node, arg, container);
+    }
+
+    private Object translateMorph(String layer) {
+        // might be extended...
+        if (layer.equals("ANA"))
+            return ResourceMapper.descriptor2policy("ANA");
+        else
+            return layer;
+
+    }
+
+    @SuppressWarnings("unchecked")
+    /**
+     * Processes individual position conditions as provided in the OPTS node under the OPBEG node.
+     * #BED allows specifying position constraints that apply to the beginning or the end of the subquery X.
+     * E.g., in #BED(X, tpos/tpos), the 'tpos' constraints before the slash indicate conditions that apply 
+     * to the beginning of X, those after the slash are conditions that apply to the end of X.
+     * See the official C-II documentation for more details. <br/><br/>
+     * What is important here is the following: <br/>
+     * Assume the query #BED(der Mann, sa/pa). This means that <b>the beginning</b> of "der Mann" stands at
+     * the beginning of a sentence and that <b>the end</b> (because this constraint comes after the slash) stands at the 
+     * beginning of a paragraph. The "end" means the last item, here "Mann", so this token comes at the beginning
+     * of a paragraph. To capture this, we choose spanRefs: The last item of X matches the first item of the span (here: P).
+     * @param cond The tpos condition node.
+     * @param distributedOperands The operands list shared by all condition groups; it receives the operands of the query segment X.
+     * @param mode Either "beg" or "end", depending on whether the condition applies to the beginning or the end of X.
+     * @return The position group created for this condition.
+     */
+    private LinkedHashMap<String, Object> processPositionCondition(Tree cond,
+            ArrayList<Object> distributedOperands, String mode) {
+        boolean negated = false;
+        String elem; // the 'span' (s/p/t)
+        String position = "frames:matches"; // default
+        // spanRef to be used for the element ('span')
+        Integer[] elemSpanRef = null;
+        // spanRef to be used for the subquery X
+        Integer[] hitSpanRef = null; 
+
+        String nodeString = cond.toStringTree();
+        if (nodeString.startsWith("-")) {
+            negated = true;
+            nodeString = nodeString.substring(1);
+        }
+        else if (nodeString.startsWith("+")) {
+            nodeString = nodeString.substring(1);
+        }
+        elem = nodeString.substring(0, 1);
+        nodeString = nodeString.substring(1);
+        // in cases where the end of X shall match the beginning of
+        // the span, or vice versa,
+        // we need to define spanRefs
+        if (mode.equals("beg")) {
+            if (nodeString.equals("a")) {
+                position = "frames:startswith";
+            }
+            else if (nodeString.equals("e")) {
+                hitSpanRef = new Integer[] { 0, 1 };
+                elemSpanRef = new Integer[] { -1, 1 };
+            }
+        }
+        else if (mode.equals("end")) {
+            if (nodeString.equals("e")) {
+                position = "frames:endswith";
+            }
+            else if (nodeString.equals("a")) {
+                hitSpanRef = new Integer[] { 0, 1 };
+                elemSpanRef = new Integer[] { -1, 1 };
+            }
+        }
+        // Create the position group and add the span and the subquery
+        // as operands, possibly wrapped in spanRefs
+        LinkedHashMap<String, Object> positionGroup = CqlfObjectGenerator
+                .makePosition(new String[] { position }, new String[0]);
+        if (negated)
+            positionGroup.put("exclude", true);
+        ArrayList<Object> posOperands = new ArrayList<Object>();
+        LinkedHashMap<String, Object> classGroup = 
+                CqlfObjectGenerator.makeSpanClass(classCounter++);
+        classGroup.put("operands", distributedOperands);
+        positionGroup.put("operands", posOperands);
+        LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
+        span.put("@type", "korap:span");
+        span.put("key", elem);
+        objectStack.push(classGroup);
+        if (hitSpanRef != null) {
+            LinkedHashMap<String, Object> spanRefAroundHit = 
+                    CqlfObjectGenerator.makeSpanReference(hitSpanRef, "focus");
+            ((ArrayList<Object>) spanRefAroundHit.get("operands"))
+                    .add(classGroup);
+            // re-assign after wrapping classGroup in spanRef
+            classGroup = spanRefAroundHit;
+        }
+        if (elemSpanRef != null) {
+            LinkedHashMap<String, Object> spanRefAroundSpan = 
+                    CqlfObjectGenerator.makeSpanReference(elemSpanRef, "focus");
+            ((ArrayList<Object>) spanRefAroundSpan.get("operands")).add(span);
+            // re-assign after wrapping span in spanRef
+            span = spanRefAroundSpan;
+        }
+        posOperands.add(span);
+        posOperands.add(classGroup);
+        return positionGroup;
+    }
+
+    private LinkedHashMap<String, Object> parseOPINOptions(Tree node) {
+        Tree posnode = getFirstChildWithCat(node, "POS");
+        Tree rangenode = getFirstChildWithCat(node, "RANGE");
+        Tree exclnode = getFirstChildWithCat(node, "EXCL");
+        Tree groupnode = getFirstChildWithCat(node, "GROUP");
+        boolean negatePosition = false;
+        LinkedHashMap<String, Object> posOptions = new LinkedHashMap<String, Object>();
+        ArrayList<String> positions = new ArrayList<String>();
+        ArrayList<String> classRefCheck = new ArrayList<String>();
+        posOptions.put("matchall", false);
+        String frame = "";
+        String posOption = null;
+        if (posnode != null) {
+            posOption = posnode.getChild(0).toStringTree();
+            switch (posOption) {
+                case "L":
+                    positions.add("frames:startswith");
+                    classRefCheck.add("classRefCheck:includes");
+                    frame = "startswith";
+                    break;
+                case "R":
+                    positions.add("frames:endswith");
+                    classRefCheck.add("classRefCheck:includes");
+                    frame = "endswith";
+                    break;
+                case "F":
+                    positions.add("frames:matches");
+                    classRefCheck.add("classRefCheck:includes");
+                    frame = "matches";
+                    break;
+                case "FE":
+                    positions.add("frames:matches");
+                    classRefCheck.add("classRefCheck:equals");
+                    frame = "matches";
+                    break;
+                case "FI":
+                    positions.add("frames:matches");
+                    classRefCheck.add("classRefCheck:unequals");
+                    classRefCheck.add("classRefCheck:includes");
+                    frame = "matches-noident";
+                    break;
+                case "N":
+                    positions.add("frames:contains");
+                    classRefCheck.add("classRefCheck:includes");
+                    frame = "contains";
+                    break;
+            }
+        }
+        else {
+            classRefCheck.add("classRefCheck:includes");
+            frame = "contains";
+        }
+        posOptions.put("frames", positions);
+        posOptions.put("classRefCheck", classRefCheck);
+        posOptions.put("frame", "frame:" + frame);
+        addMessage(
+                StatusCodes.DEPRECATED_QUERY_ELEMENT,
+                "Deprecated 2014-09-22: 'frame' only to be supported until 3 months from deprecation date. "
+                        + "Position frames are now expressed through 'frames' and 'sharedClasses'");
+
+        if (exclnode != null) {
+            if (exclnode.getChild(0).toStringTree().equals("YES")) {
+                negatePosition = !negatePosition;
+            }
+        }
+
+        if (rangenode != null) {
+            String range = rangenode.getChild(0).toStringTree().toLowerCase();
+            if (range.equals("all")) {
+                posOptions.put("matchall", true);
+                // LinkedHashMap<String,Object> ref =
+                // makeResetReference(); // reset all defined classes
+                // wrapOperand(node,2,ref);
+            }
+        }
+
+        if (negatePosition) {
+            posOptions.put("exclude", "true");
+        }
+
+        if (groupnode != null) {
+            String grouping = groupnode.getChild(0).toStringTree()
+                    .equals("max") ? "true" : "false";
+            posOptions.put("grouping", grouping);
+        }
+        return posOptions;
+    }
+
+    private LinkedHashMap<String, Object> parseOPOVOptions(Tree node) {
+        boolean negatePosition = false;
+        Tree posnode = getFirstChildWithCat(node, "POS");
+        Tree rangenode = getFirstChildWithCat(node, "RANGE");
+        Tree exclnode = getFirstChildWithCat(node, "EXCL");
+        Tree groupnode = getFirstChildWithCat(node, "GROUP");
+        LinkedHashMap<String, Object> posOptions = new LinkedHashMap<String, Object>();
+        ArrayList<String> positions = new ArrayList<String>();
+        ArrayList<String> classRefCheck = new ArrayList<String>();
+        posOptions.put("matchall", false);
+        String frame = "";
+        String posOption = null;
+        if (posnode != null) {
+            posOption = posnode.getChild(0).toStringTree();
+            switch (posOption) {
+                case "L":
+                    positions.add("frames:startswith");
+                    positions.add("frames:overlapsLeft");
+                    classRefCheck.add("classRefCheck:intersects");
+                    frame = "overlapsLeft";
+                    break;
+                case "R":
+                    positions.add("frames:endswith");
+                    positions.add("frames:overlapsRight");
+                    classRefCheck.add("classRefCheck:intersects");
+                    frame = "overlapsRight";
+                    break;
+                case "F":
+                    positions.add("frames:matches");
+                    classRefCheck.add("classRefCheck:intersects");
+                    frame = "matches";
+                    break;
+                case "FE":
+                    positions.add("frames:matches");
+                    classRefCheck.add("classRefCheck:equals");
+                    frame = "matches";
+                    break;
+                case "FI":
+                    positions.add("frames:matches");
+                    classRefCheck.add("classRefCheck:unequals");
+                    frame = "matches-noident";
+                    break;
+                case "X":
+                    positions.add("frames:contains");
+                    classRefCheck.add("classRefCheck:intersects");
+                    frame = "overlaps";
+                    break;
+            }
+        }
+        else {
+            classRefCheck.add("classRefCheck:intersects");
+            frame = "overlaps";
+        }
+
+        posOptions.put("frames", positions);
+        posOptions.put("classRefCheck", classRefCheck);
+        posOptions.put("frame", "frame:" + frame);
+        addMessage(
+                StatusCodes.DEPRECATED_QUERY_ELEMENT,
+                "Deprecated 2014-09-22: 'frame' only to be supported until 3 months from deprecation date. "
+                        + "Position frames are now expressed through 'frames' and 'sharedClasses'");
+
+        if (exclnode != null) {
+            if (exclnode.getChild(0).toStringTree().equals("YES")) {
+                negatePosition = !negatePosition;
+            }
+        }
+
+        if (rangenode != null) {
+            String range = rangenode.getChild(0).toStringTree().toLowerCase();
+            if (range.equals("all")) {
+                posOptions.put("matchall", true);
+                // LinkedHashMap<String,Object> ref =
+                // makeResetReference(); // reset all defined classes
+                // wrapOperand(node,2,ref);
+            }
+        }
+
+        if (negatePosition) {
+            posOptions.put("exclude", "true");
+        }
+
+        if (groupnode != null) {
+            String grouping = groupnode.getChild(0).toStringTree()
+                    .equals("max") ? "true" : "false";
+            posOptions.put("grouping", grouping);
+        }
+        return posOptions;
+    }
+
+    @SuppressWarnings({ "unchecked" })
+    private LinkedHashMap<String, Object> wrap(LinkedHashMap[] wrapCascade) {
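+        // nest each map into the operands of the next map in the cascade
+        // and return the outermost one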
+        int i;
+        for (i = 0; i < wrapCascade.length - 1; i++) {
+            ArrayList<Object> containerOperands = (ArrayList<Object>) wrapCascade[i + 1]
+                    .get("operands");
+            containerOperands.add(0, wrapCascade[i]);
+        }
+        return wrapCascade[i];
+    }
+
+    @SuppressWarnings("unchecked")
+    private void putIntoSuperObject(LinkedHashMap<String, Object> object,
+            int objStackPosition) {
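+        // append the object to the operands of the object at the given
+        // stack position (prepend if that operands list was registered as
+        // inverted); with no such object, it becomes the top-level query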
+        if (objectStack.size() > objStackPosition) {
+            ArrayList<Object> topObjectOperands = (ArrayList<Object>) objectStack
+                    .get(objStackPosition).get("operands");
+            if (!invertedOperandsLists.contains(topObjectOperands)) {
+                topObjectOperands.add(object);
+            }
+            else {
+                topObjectOperands.add(0, object);
+            }
+        }
+        else {
+            requestMap.put("query", object);
+        }
+    }
+
+    private void putIntoSuperObject(LinkedHashMap<String, Object> object) {
+        putIntoSuperObject(object, 0);
+    }
+
+    /**
+     * Normalises position operators to equivalents using #BED
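+     * (e.g. <tt>der:sa,-pa</tt> is rewritten to <tt>#BED(der , sa,-pa)</tt>).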
+     */
+    private String rewritePositionQuery(String q) {
+        Pattern p = Pattern.compile("(\\w+):((\\+|-)?(sa|se|pa|pe|ta|te),?)+");
+        Matcher m = p.matcher(q);
+
+        String rewrittenQuery = q;
+        while (m.find()) {
+            String match = m.group();
+            String conditionsString = match.split(":")[1];
+            Pattern conditionPattern = Pattern
+                    .compile("(\\+|-)?(sa|se|pa|pe|ta|te)");
+            Matcher conditionMatcher = conditionPattern
+                    .matcher(conditionsString);
+            String replacement = "#BED(" + m.group(1) + " , ";
+            while (conditionMatcher.find()) {
+                replacement = replacement + conditionMatcher.group() + ",";
+            }
+            // remove trailing comma and close parenthesis
+            replacement = 
+                    replacement.substring(0, replacement.length() - 1)+ ")"; 
+            rewrittenQuery = rewrittenQuery.replace(match, replacement);
+        }
+        return rewrittenQuery;
+    }
+
+    private Tree parseCosmasQuery(String query) {
+        query = rewritePositionQuery(query);
+        Tree tree = null;
+        Antlr3DescriptiveErrorListener errorListener = 
+                new Antlr3DescriptiveErrorListener(query);
+        try {
+            ANTLRStringStream ss = new ANTLRStringStream(query);
+            c2psLexer lex = new c2psLexer(ss);
+            org.antlr.runtime.CommonTokenStream tokens = 
+                    new org.antlr.runtime.CommonTokenStream(lex); // v3
+            parser = new c2psParser(tokens);
+            // Use custom error reporters
+            lex.setErrorReporter(errorListener);
+            ((c2psParser) parser).setErrorReporter(errorListener); 
+            c2psParser.c2ps_query_return c2Return = ((c2psParser) parser)
+                    .c2ps_query(); // instead of t().
+            // Display the AST tree:
+            tree = (Tree) c2Return.getTree();
+
+        }
+        catch (RecognitionException e) {
+            log.error("Could not parse query. Please make sure it is well-formed.");
+            addError(StatusCodes.MALFORMED_QUERY,
+                    "Could not parse query. Please make sure it is well-formed.");
+        }
+        // guard against a null tree if parsing threw a RecognitionException
+        String treestring = (tree != null) ? tree.toStringTree() : "";
+
+        boolean erroneous = false;
+        if (parser.failed() || parser.getNumberOfSyntaxErrors() > 0) {
+            erroneous = true;
+            tree = null;
+        }
+
+        if (erroneous || treestring.contains("<mismatched token")
+                || treestring.contains("<error")
+                || treestring.contains("<unexpected")) {
+            log.error(errorListener.generateFullErrorMsg().toString());
+            addError(errorListener.generateFullErrorMsg());
+        }
+        return tree;
+    }
 }
\ No newline at end of file
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/CqlQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/CqlQueryProcessor.java
index 7e76964..9064a0e 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/CqlQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/CqlQueryProcessor.java
@@ -12,7 +12,7 @@
 
 /**
  * @author margaretha
- * @date 	09.05.14
+ * @date 09.05.14
  */
 public class CqlQueryProcessor extends AbstractQueryProcessor {
 
@@ -22,7 +22,10 @@
     private static final String INDEX_WORDS = "words";
     private static final String TERM_RELATION_CQL_1_1 = "scr";
     private static final String TERM_RELATION_CQL_1_2 = "=";
-    private static final String SUPPORTED_RELATION_EXACT = "exact"; // not in the doc    
+    // not in the doc
+    private static final String SUPPORTED_RELATION_EXACT = "exact";
     private static final String OPERATION_OR = "operation:or";
     private static final String OPERATION_SEQUENCE = "operation:sequence";
     private static final String OPERATION_POSITION = "operation:position";
@@ -31,16 +34,17 @@
     private LinkedHashMap<String, Object> requestMap;
     private String version;
     private boolean isCaseSensitive; // default true
-    
-    public CqlQueryProcessor(String query) {
+
+    public CqlQueryProcessor (String query) {
         this(query, VERSION_1_2, true);
     }
 
-    public CqlQueryProcessor(String query, String version) {
-        this(query, version, true);                
+    public CqlQueryProcessor (String query, String version) {
+        this(query, version, true);
     }
 
-    public CqlQueryProcessor(String query, String version, boolean isCaseSensitive) {
+    public CqlQueryProcessor (String query, String version,
+            boolean isCaseSensitive) {
         this.version = version;
         this.isCaseSensitive = isCaseSensitive;
         this.requestMap = new LinkedHashMap<>();
@@ -48,41 +52,41 @@
         process(query);
     }
 
-
     @Override
     public Map<String, Object> getRequestMap() {
         return this.requestMap;
     }
 
     @Override
-    public void process(String query) {    	
-    	 if ((query == null) || query.isEmpty()) 
-             addError(StatusCodes.MALFORMED_QUERY, "SRU diagnostic 27: An empty query is unsupported.");
-    	
+    public void process(String query) {
+        if ((query == null) || query.isEmpty())
+            addError(StatusCodes.MALFORMED_QUERY,
+                    "SRU diagnostic 27: An empty query is unsupported.");
+
         CQLNode cqlNode = parseQuerytoCQLNode(query);
-        Map<String,Object> queryMap = parseCQLNode(cqlNode);
+        Map<String, Object> queryMap = parseCQLNode(cqlNode);
         requestMap.put("query", queryMap);
-        //requestMap.put("query", sentenceWrapper(queryMap));
+        // requestMap.put("query", sentenceWrapper(queryMap));
     }
 
-    private Map<String,Object> sentenceWrapper(Map<String,Object> m){
-    	Map<String, Object> map = new LinkedHashMap<String,Object>();    	
+    private Map<String, Object> sentenceWrapper(Map<String, Object> m) {
+        Map<String, Object> map = new LinkedHashMap<String, Object>();
         map.put("@type", "korap:group");
         map.put("operation", OPERATION_POSITION);
         map.put("frame", "frame:contains");
-        
-        Map<String, Object> sentence = new LinkedHashMap<String,Object>();    	
+
+        Map<String, Object> sentence = new LinkedHashMap<String, Object>();
         sentence.put("@type", "korap:span");
         sentence.put("key", "s");
-        
+
         List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
         list.add(sentence);
         list.add(m);
-        map.put("operands", list);        
-        
-    	return map;
+        map.put("operands", list);
+
+        return map;
     }
-    
+
     private CQLNode parseQuerytoCQLNode(String query) {
         try {
             int compat = -1;
@@ -95,140 +99,153 @@
             }
             return new CQLParser(compat).parse(query);
 
-        } catch (CQLParseException | IOException e) {
+        }
+        catch (CQLParseException | IOException e) {
             addError(StatusCodes.MALFORMED_QUERY, "Error parsing CQL");
             return null;
         }
     }
 
-    private Map<String,Object> parseCQLNode(CQLNode node) {    	
-    	    	
+    private Map<String, Object> parseCQLNode(CQLNode node) {
+
         if (node instanceof CQLTermNode) {
             return parseTermNode((CQLTermNode) node);
-        } else if (node instanceof CQLAndNode) {
+        }
+        else if (node instanceof CQLAndNode) {
             return parseAndNode((CQLAndNode) node);
-        } else if (node instanceof CQLOrNode) {
+        }
+        else if (node instanceof CQLOrNode) {
             return parseOrNode((CQLOrNode) node);
-        } else {
-            addError(StatusCodes.UNKNOWN_QUERY_ELEMENT, "SRU diagnostic 48: Only basic search including term-only " +
-                    "and boolean (AND,OR) operator queries are currently supported.");
-            return new LinkedHashMap<String,Object>();
+        }
+        else {
+            addError(
+                    StatusCodes.UNKNOWN_QUERY_ELEMENT,
+                    "SRU diagnostic 48: Only basic search including term-only "
+                            + "and boolean (AND,OR) operator queries are currently supported.");
+            return new LinkedHashMap<String, Object>();
         }
     }
 
-    private Map<String,Object> parseTermNode(CQLTermNode node) {
+    private Map<String, Object> parseTermNode(CQLTermNode node) {
         checkTermNode(node);
         final String term = node.getTerm();
         if ((term == null) || term.isEmpty()) {
-            addError(StatusCodes.NO_QUERY, "SRU diagnostic 27: An empty term is unsupported.");
-            return new LinkedHashMap<String,Object>();
-        } else if (term.contains(" ")) {
+            addError(StatusCodes.NO_QUERY,
+                    "SRU diagnostic 27: An empty term is unsupported.");
+            return new LinkedHashMap<String, Object>();
+        }
+        else if (term.contains(" ")) {
             return writeSequence(term);
-        } else {
+        }
+        else {
             return writeTerm(term);
         }
     }
 
-    private Map<String,Object> parseAndNode(CQLAndNode node) {
+    private Map<String, Object> parseAndNode(CQLAndNode node) {
         checkBooleanModifier(node);
-        
-        Map<String, Object> map = new LinkedHashMap<String,Object>();
+
+        Map<String, Object> map = new LinkedHashMap<String, Object>();
         map.put("@type", "korap:group");
         map.put("operation", OPERATION_SEQUENCE);
         map.put("inOrder", false);
-        
+
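+        // boolean AND is serialized as an unordered sequence with a
+        // sentence ('s') distance of 0, i.e. co-occurrence within the
+        // same sentence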
         List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
-        Map<String, Object> distanceMap = new LinkedHashMap<String,Object>();
+        Map<String, Object> distanceMap = new LinkedHashMap<String, Object>();
         distanceMap.put("@type", "korap:distance");
         distanceMap.put("key", "s");
         distanceMap.put("min", "0");
         distanceMap.put("max", "0");
         list.add(distanceMap);
         map.put("distances", list);
-        
-        List<Map<String, Object>> operandList = new ArrayList<Map<String, Object>>();        
+
+        List<Map<String, Object>> operandList = new ArrayList<Map<String, Object>>();
         operandList.add(parseCQLNode(node.getLeftOperand()));
         operandList.add(parseCQLNode(node.getRightOperand()));
         map.put("operands", operandList);
-        
+
         return map;
     }
 
-    private Map<String,Object> parseOrNode(CQLOrNode node) {
-    	checkBooleanModifier(node);
-    	
-    	Map<String, Object> map = new LinkedHashMap<String,Object>();
-    	map.put("@type", "korap:group");
+    private Map<String, Object> parseOrNode(CQLOrNode node) {
+        checkBooleanModifier(node);
+
+        Map<String, Object> map = new LinkedHashMap<String, Object>();
+        map.put("@type", "korap:group");
         map.put("operation", OPERATION_OR);
-        
+
         List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
         list.add(parseCQLNode(node.getLeftOperand()));
         list.add(parseCQLNode(node.getRightOperand()));
         map.put("operands", list);
-        
-    	return map;
+
+        return map;
     }
 
     private Map<String, Object> writeSequence(String str) {
-    	Map<String, Object> sequenceMap = new LinkedHashMap<String,Object>();       
+        Map<String, Object> sequenceMap = new LinkedHashMap<String, Object>();
         sequenceMap.put("@type", "korap:group");
         sequenceMap.put("operation", OPERATION_SEQUENCE);
-        
+
         List<Map<String, Object>> termList = new ArrayList<Map<String, Object>>();
         String[] terms = str.split(" ");
-        for (String term : terms){
-        	termList.add(writeTerm(term));
+        for (String term : terms) {
+            termList.add(writeTerm(term));
         }
         sequenceMap.put("operands", termList);
-        
+
         return sequenceMap;
     }
 
-    private Map<String, Object> writeTerm(String term) {   	
-    	Map<String, Object> map = new LinkedHashMap<String,Object>();
-    	map.put("@type", "korap:term");
-    	if (!isCaseSensitive) {
-    		map.put("caseInsensitive","true");
-    	}
-    	map.put("key", term);
-    	map.put("layer", "orth");
-    	map.put("match", "match:eq");
-    	
-    	Map<String, Object> tokenMap = new LinkedHashMap<String,Object>();    	
-    	tokenMap.put("@type", "korap:token");    	
-    	tokenMap.put("wrap", map);
-    	return tokenMap;
+    private Map<String, Object> writeTerm(String term) {
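+        // builds a korap:token wrapping a korap:term that matches the
+        // given surface form ("orth" layer, match:eq)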
+        Map<String, Object> map = new LinkedHashMap<String, Object>();
+        map.put("@type", "korap:term");
+        if (!isCaseSensitive) {
+            map.put("caseInsensitive", "true");
+        }
+        map.put("key", term);
+        map.put("layer", "orth");
+        map.put("match", "match:eq");
+
+        Map<String, Object> tokenMap = new LinkedHashMap<String, Object>();
+        tokenMap.put("@type", "korap:token");
+        tokenMap.put("wrap", map);
+        return tokenMap;
     }
 
     private void checkBooleanModifier(CQLBooleanNode node) {
         List<Modifier> modifiers = node.getModifiers();
         if ((modifiers != null) && !modifiers.isEmpty()) {
             Modifier modifier = modifiers.get(0);
-            addError(105, "SRU diagnostic 20: Relation modifier " +
-                    modifier.toCQL() + " is not supported.");
+            addError(105,
+                    "SRU diagnostic 20: Relation modifier " + modifier.toCQL()
+                            + " is not supported.");
         }
     }
 
     private void checkTermNode(CQLTermNode node) {
         // only allow "cql.serverChoice" and "words" index
-        if (!(INDEX_CQL_SERVERCHOICE.equals(node.getIndex()) ||
-                INDEX_WORDS.equals(node.getIndex()))) {
-            addError(105, "SRU diagnostic 16: Index " + node.getIndex() + " is not supported.");
+        if (!(INDEX_CQL_SERVERCHOICE.equals(node.getIndex()) || INDEX_WORDS
+                .equals(node.getIndex()))) {
+            addError(105, "SRU diagnostic 16: Index " + node.getIndex()
+                    + " is not supported.");
         }
         // only allow "=" relation without any modifiers
         CQLRelation relation = node.getRelation();
         String baseRel = relation.getBase();
-        if (!(TERM_RELATION_CQL_1_1.equals(baseRel) ||
-                TERM_RELATION_CQL_1_2.equals(baseRel) ||
-                SUPPORTED_RELATION_EXACT.equals(baseRel))) {
-            addError(105, "SRU diagnostic 19: Relation " +
-                    relation.getBase() + " is not supported.");
+        if (!(TERM_RELATION_CQL_1_1.equals(baseRel)
+                || TERM_RELATION_CQL_1_2.equals(baseRel) || SUPPORTED_RELATION_EXACT
+                    .equals(baseRel))) {
+            addError(105, "SRU diagnostic 19: Relation " + relation.getBase()
+                    + " is not supported.");
         }
         List<Modifier> modifiers = relation.getModifiers();
         if ((modifiers != null) && !modifiers.isEmpty()) {
             Modifier modifier = modifiers.get(0);
-            addError(105, "SRU diagnostic 20: Relation modifier " +
-                    modifier.getValue() + " is not supported.");
+            addError(
+                    105,
+                    "SRU diagnostic 20: Relation modifier "
+                            + modifier.getValue() + " is not supported.");
         }
     }
 
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/DummyQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/DummyQueryProcessor.java
index 6c49737..5ce7a0c 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/DummyQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/DummyQueryProcessor.java
@@ -1,16 +1,18 @@
 package de.ids_mannheim.korap.query.serialize;
 
 /**
- * This class serves as a dummy class for reporting errors when the query or query language as specified 
- * in the {@link #QuerySerializer} are empty or erroneous. Without instatiating a class, errors/warnings cannot
- * be reported.  
+ * This class serves as a dummy class for reporting errors when the
+ * query or query language specified in the
+ * {@link QuerySerializer} is empty or erroneous. Without
+ * instantiating a class, errors/warnings cannot be reported.
+ * 
  * @author bingel
  *
  */
 public final class DummyQueryProcessor extends AbstractQueryProcessor {
 
-	@Override
-	public void process(String query) {
-		// This is just a dummy class. Do nothing!		
-	}
+    @Override
+    public void process(String query) {
+        // This is just a dummy class. Do nothing!
+    }
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/IdWriter.java b/src/main/java/de/ids_mannheim/korap/query/serialize/IdWriter.java
index 77dfafc..70d6a31 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/IdWriter.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/IdWriter.java
@@ -9,15 +9,15 @@
 /**
  * @author hanl
  * @date 04/06/2014
- * <p/>
- * create idn for korap:token
+ *       <p/>
+ *       creates ids for korap:token nodes
  */
 public class IdWriter {
 
     private JsonNode node;
     private int counter;
 
-    public IdWriter(String json) {
+    public IdWriter (String json) {
         node = JsonUtils.readTree(json);
         counter = 0;
     }
@@ -32,7 +32,8 @@
             Iterator<JsonNode> operands = node.path("operands").elements();
             while (operands.hasNext())
                 process(operands.next());
-        } else if (node.path("@type").asText().equals("korap:token"))
+        }
+        else if (node.path("@type").asText().equals("korap:token"))
             addId(node);
     }
 
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java b/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java
index d30fd6d..85b7a0e 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java
@@ -16,7 +16,7 @@
     private Map meta;
     private SpanContext spanContext;
 
-    public MetaQueryBuilder() {
+    public MetaQueryBuilder () {
         this.meta = new LinkedHashMap();
     }
 
@@ -31,7 +31,7 @@
      * @return
      */
     public MetaQueryBuilder setSpanContext(Integer left, String leftType,
-                                           Integer right, String rightType) {
+            Integer right, String rightType) {
         this.spanContext = new SpanContext(left, leftType, right, rightType);
         return this;
     }
@@ -41,8 +41,9 @@
     }
 
     /**
-     * context if of type paragraph or sentence where left and right size delimiters are irrelevant; or 2-token, 2-char
-     * p/paragraph, s/sentence or token, char
+     * context is either of type paragraph or sentence, where left and
+     * right size delimiters are irrelevant, or of the form
+     * "2-token, 2-char"; valid types are p/paragraph, s/sentence,
+     * token and char
      *
      * @param context
      * @return
@@ -51,17 +52,17 @@
         if (context.startsWith("s") | context.startsWith("p"))
             this.spanContext = new SpanContext(context);
         else {
-            String[] ct = context.replaceAll("\\s+","").split(",");
+            String[] ct = context.replaceAll("\\s+", "").split(",");
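+            // expected form: "<size>-<type>,<size>-<type>", e.g. "2-token,2-char" (left, right)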
             String[] lc = ct[0].split("-");
             String[] rc = ct[1].split("-");
-            this.spanContext = new SpanContext(Integer.valueOf(lc[0]), lc[1], Integer.valueOf(rc[0]), rc[1]);
+            this.spanContext = new SpanContext(Integer.valueOf(lc[0]), lc[1],
+                    Integer.valueOf(rc[0]), rc[1]);
         }
         return this;
     }
 
     public MetaQueryBuilder fillMeta(Integer pageIndex, Integer pageInteger,
-                                     Integer pageLength,
-                                     String ctx, Boolean cutoff) {
+            Integer pageLength, String ctx, Boolean cutoff) {
         if (pageIndex != null)
             this.addEntry("startIndex", pageIndex);
         if (pageIndex == null && pageInteger != null)
@@ -75,7 +76,6 @@
         return this;
     }
 
-
     public MetaQueryBuilder addEntry(String name, Object value) {
         meta.put(name, value);
         return this;
@@ -95,7 +95,6 @@
         private int right_size;
         private String context = null;
 
-
         /**
          * context segment if context is either of type char or token.
          * size can differ for left and right span
@@ -106,14 +105,14 @@
          * @param rt
          * @return
          */
-        public SpanContext(int ls, String lt, int rs, String rt) {
+        public SpanContext (int ls, String lt, int rs, String rt) {
             this.left_type = lt;
             this.left_size = ls;
             this.right_type = rt;
             this.right_size = rs;
         }
 
-        public SpanContext(String context) {
+        public SpanContext (String context) {
             this.context = context;
         }
 
@@ -130,7 +129,8 @@
                 r.add(this.right_size);
                 map.put("right", r);
                 meta.put("context", map);
-            } else
+            }
+            else
                 meta.put("context", this.context);
             return meta;
         }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java
index 0da9e5c..3ccd297 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java
@@ -21,763 +21,887 @@
  */
 public class PoliqarpPlusQueryProcessor extends Antlr4AbstractQueryProcessor {
 
-	private static Logger log = LoggerFactory.getLogger(PoliqarpPlusQueryProcessor.class);
-	private int classCounter = 128;
+    private static Logger log = LoggerFactory
+            .getLogger(PoliqarpPlusQueryProcessor.class);
+    private int classCounter = 128;
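+    // Implicitly created classes get IDs above 127, the upper bound
+    // for user-defined class IDs (see processSpanclass/processMatching).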
 
-	/**
-	 * Most centrally, this class maintains a set of nested maps and lists which represent the JSON tree, which is built by the JSON serialiser
-	 * on basis of the {@link #requestMap} at the root of the tree.
-	 * <br/>
-	 * The class further maintains a set of stacks which effectively keep track of which objects to embed in which containing objects.
-	 *
-	 * @param query The syntax tree as returned by ANTLR
-	 * @throws QueryException
-	 */
-	public PoliqarpPlusQueryProcessor(String query) {
-		CqlfObjectGenerator.setQueryProcessor(this);
-		process(query);
-		log.info(">>> " + requestMap.get("query") + " <<<");
-	}
+    /**
+     * Most centrally, this class maintains a set of nested maps and
+     * lists representing the JSON tree, which is built by the JSON
+     * serialiser on the basis of the {@link #requestMap} at the root
+     * of the tree. <br/>
+     * The class further maintains a set of stacks which effectively
+     * keep track of which objects to embed in which containing
+     * objects.
+     *
+     * @param query
+     *            The query string to process
+     * @throws QueryException
+     */
+    public PoliqarpPlusQueryProcessor (String query) {
+        CqlfObjectGenerator.setQueryProcessor(this);
+        process(query);
+        log.info(">>> " + requestMap.get("query") + " <<<");
+    }
 
-	@Override
-	public void process(String query) {
-		ParseTree tree;
-		tree = parsePoliqarpQuery(query);
-		super.parser = this.parser;
-		log.info("Processing PoliqarpPlus query: "+query);
-		if (tree != null) {
-			log.debug("ANTLR parse tree: "+tree.toStringTree(parser));
-			processNode(tree);
-		} else {
-			addError(StatusCodes.MALFORMED_QUERY, "Could not parse query >>> "+query+" <<<.");
-		}
-	}
+    @Override
+    public void process(String query) {
+        ParseTree tree;
+        tree = parsePoliqarpQuery(query);
+        super.parser = this.parser;
+        log.info("Processing PoliqarpPlus query: " + query);
+        if (tree != null) {
+            log.debug("ANTLR parse tree: " + tree.toStringTree(parser));
+            processNode(tree);
+        }
+        else {
+            addError(StatusCodes.MALFORMED_QUERY, "Could not parse query >>> "
+                    + query + " <<<.");
+        }
+    }
 
-	/**
-	 * Recursively calls itself with the children of the currently active node, traversing the tree nodes in a top-down, depth-first fashion.
-	 * A list is maintained that contains all visited nodes
-	 * which have been directly addressed by their (grand-/grand-grand-/...) parent nodes, such that some processing time is saved, as these node will
-	 * not be processed. This method is effectively a list of if-statements that are responsible for treating the different node types correctly and filling the
-	 * respective maps/lists.
-	 *
-	 * @param node The currently processed node. The process(String query) method calls this method with the root.
-	 * @throws QueryException
-	 */
-	private void processNode(ParseTree node) {
-		// Top-down processing
-		if (visited.contains(node)) return;
-		else visited.add(node);
+    /**
+     * Recursively calls itself with the children of the currently
+     * active node, traversing the tree nodes in a top-down,
+     * depth-first fashion. A list is maintained that contains all
+     * visited nodes which have been directly addressed by their
+     * (grand-/grand-grand-/...) parent nodes, such that some
+     * processing time is saved, as these nodes will not be processed.
+     * This method is effectively a list of if-statements that are
+     * responsible for treating the different node types correctly and
+     * filling the respective maps/lists.
+     *
+     * @param node
+     *            The currently processed node. The process(String
+     *            query) method calls this method with the root.
+     * @throws QueryException
+     */
+    private void processNode(ParseTree node) {
+        // Top-down processing
+        if (visited.contains(node))
+            return;
+        else
+            visited.add(node);
 
-		currentNode = node;
+        currentNode = node;
 
-		String nodeCat = getNodeCat(node);
-		openNodeCats.push(nodeCat);
+        String nodeCat = getNodeCat(node);
+        openNodeCats.push(nodeCat);
 
-		stackedObjects = 0;
+        stackedObjects = 0;
 
-		if (verbose) {
-			System.err.println(" " + objectStack);
-			System.out.println(openNodeCats);
-		}
+        if (verbose) {
+            System.err.println(" " + objectStack);
+            System.out.println(openNodeCats);
+        }
 
-		/*
-		 ****************************************************************
-		 **************************************************************** 
-		 * 			Processing individual node categories  				*
-		 ****************************************************************
-		 ****************************************************************
-		 */
-				
-		if (nodeCat.equals("segment")) {
-			processSegment(node);
-		}
-		
-		if (nodeCat.equals("sequence")) {
-			processSequence(node);
-		}
+        /*
+         * ***************************************************************
+         * ***************************************************************
+         * *********** Processing individual node categories *************
+         * ***************************************************************
+         * ***************************************************************
+         */
 
-		if (nodeCat.equals("emptyTokenSequence")) {
-			processEmptyTokenSequence(node);
-		}
+        if (nodeCat.equals("segment")) {
+            processSegment(node);
+        }
 
-		if (nodeCat.equals("emptyTokenSequenceClass")) {
-			processEmptyTokenSequenceClass(node);
-		}
+        if (nodeCat.equals("sequence")) {
+            processSequence(node);
+        }
 
-		if (nodeCat.equals("token")) {
-			processToken(node);
-		}
+        if (nodeCat.equals("emptyTokenSequence")) {
+            processEmptyTokenSequence(node);
+        }
 
-		if (nodeCat.equals("alignment")) {
-			processAlignment(node);
-		}
+        if (nodeCat.equals("emptyTokenSequenceClass")) {
+            processEmptyTokenSequenceClass(node);
+        }
 
-		if (nodeCat.equals("span")) {
-			processSpan(node);
-		}
+        if (nodeCat.equals("token")) {
+            processToken(node);
+        }
 
-		if (nodeCat.equals("disjunction")) {
-			processDisjunction(node);
-		}
+        if (nodeCat.equals("alignment")) {
+            processAlignment(node);
+        }
 
-		if (nodeCat.equals("position")) {
-			processPosition(node);
-		}
+        if (nodeCat.equals("span")) {
+            processSpan(node);
+        }
 
-		if (nodeCat.equals("relation")) {
-			processRelation(node);
-		}
+        if (nodeCat.equals("disjunction")) {
+            processDisjunction(node);
+        }
 
-		if (nodeCat.equals("spanclass")) {
-			processSpanclass(node);
-		}
+        if (nodeCat.equals("position")) {
+            processPosition(node);
+        }
 
-		if (nodeCat.equals("matching")) {
-			processMatching(node);
-		}
+        if (nodeCat.equals("relation")) {
+            processRelation(node);
+        }
 
-		if (nodeCat.equals("submatch")) {
-			processSubmatch(node);
-		}
+        if (nodeCat.equals("spanclass")) {
+            processSpanclass(node);
+        }
 
-		if (nodeCat.equals("meta")) {
-			processMeta(node);
-		}
+        if (nodeCat.equals("matching")) {
+            processMatching(node);
+        }
 
-		if (nodeCat.equals("within") && !getNodeCat(node.getParent()).equals("position")) {
-			processWithin(node);
-		}
+        if (nodeCat.equals("submatch")) {
+            processSubmatch(node);
+        }
 
-		objectsToPop.push(stackedObjects);
+        if (nodeCat.equals("meta")) {
+            processMeta(node);
+        }
 
-		/*
-		 ****************************************************************
-		 **************************************************************** 
-		 *  recursion until 'request' node (root of tree) is processed  *
-		 ****************************************************************
-		 ****************************************************************
-		 */
-		for (int i = 0; i < node.getChildCount(); i++) {
-			ParseTree child = node.getChild(i);
-			processNode(child);
-		}
+        if (nodeCat.equals("within")
+                && !getNodeCat(node.getParent()).equals("position")) {
+            processWithin(node);
+        }
 
-		// Stuff that happens when leaving a node (taking items off the stacks)
-		for (int i = 0; i < objectsToPop.get(0); i++) {
-			objectStack.pop();
-		}
-		objectsToPop.pop();
-		openNodeCats.pop();
-	}
+        objectsToPop.push(stackedObjects);
 
-	private void processSegment(ParseTree node) {
-		// Cover possible quantification (i.e. repetition) of segment
-		ParseTree quantification = getFirstChildWithCat(node, "repetition");
-		if (quantification != null) {
-			LinkedHashMap<String,Object> quantGroup = CqlfObjectGenerator.makeGroup("repetition");
-			Integer[] minmax = parseRepetition(quantification);
-			quantGroup.put("boundary", CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
-			if (minmax[0] != null) quantGroup.put("min", minmax[0]);
-			if (minmax[1] != null) quantGroup.put("max", minmax[1]);
-			addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-07-24: 'min' and 'max' to be " +
-					"supported until 3 months from deprecation date.");
-			putIntoSuperObject(quantGroup);
-			objectStack.push(quantGroup);
-			stackedObjects++;
-		}
-	}
+        /*
+         * ***************************************************************
+         * ***************************************************************
+         * recursion until 'request' node (root of tree) is processed
+         * ***************************************************************
+         * ***************************************************************
+         */
+        for (int i = 0; i < node.getChildCount(); i++) {
+            ParseTree child = node.getChild(i);
+            processNode(child);
+        }
 
-	private void processSequence(ParseTree node) {
-		LinkedHashMap<String,Object> sequence = CqlfObjectGenerator.makeGroup("sequence");
-		ParseTree distanceNode = getFirstChildWithCat(node, "distance");
+        // Stuff that happens when leaving a node (taking items off
+        // the stacks)
+        for (int i = 0; i < objectsToPop.get(0); i++) {
+            objectStack.pop();
+        }
+        objectsToPop.pop();
+        openNodeCats.pop();
+    }
 
-		if (distanceNode!=null) {
-			Integer[] minmax = parseDistance(distanceNode);
-			LinkedHashMap<String,Object> distance = CqlfObjectGenerator.makeDistance("w", minmax[0], minmax[1]);
-			sequence.put("inOrder", true);
-			ArrayList<Object> distances = new ArrayList<Object>();
-			distances.add(distance);
-			sequence.put("distances", distances);
-			visited.add(distanceNode.getChild(0)); // don't re-visit the emptyTokenSequence node
-		}
-		putIntoSuperObject(sequence);
-		objectStack.push(sequence);
-		stackedObjects++;
-	}
+    private void processSegment(ParseTree node) {
+        // Cover possible quantification (i.e. repetition) of segment
+        ParseTree quantification = getFirstChildWithCat(node, "repetition");
+        if (quantification != null) {
+            LinkedHashMap<String, Object> quantGroup = 
+                    CqlfObjectGenerator.makeGroup("repetition");
+            Integer[] minmax = parseRepetition(quantification);
+            quantGroup.put("boundary",
+                    CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
+            putIntoSuperObject(quantGroup);
+            objectStack.push(quantGroup);
+            stackedObjects++;
+        }
+    }
 
-	@SuppressWarnings("unchecked")
-	/**
-	 * empty tokens at beginning/end of sequence
-	 * @param node
-	 */
-	private void processEmptyTokenSequence(ParseTree node) {
-		Integer[] minmax = parseEmptySegments(node);
-		// object will be either a repetition group or a single empty token
-		LinkedHashMap<String,Object> object; 
-		LinkedHashMap<String,Object> emptyToken = CqlfObjectGenerator.makeToken();
-		if (minmax[0] != 1 || minmax[1] == null || minmax[1] != 1) {
-			object = CqlfObjectGenerator.makeRepetition(minmax[0], minmax[1]);
-			((ArrayList<Object>) object.get("operands")).add(emptyToken);
-		} else {
-			object = emptyToken;
-		}
-		putIntoSuperObject(object);
-		objectStack.push(object);
-		stackedObjects++;
-	}
+    private void processSequence(ParseTree node) {
+        LinkedHashMap<String, Object> sequence = 
+                CqlfObjectGenerator.makeGroup("sequence");
+        ParseTree distanceNode = getFirstChildWithCat(node, "distance");
 
-	private void processEmptyTokenSequenceClass(ParseTree node) {
-		int classId = 1;
-		if (hasChild(node, "spanclass_id")) {
-			classId = Integer.parseInt(node.getChild(1).getChild(0).toStringTree(parser));
-		}
-		LinkedHashMap<String,Object> classGroup = CqlfObjectGenerator.makeSpanClass(classId, false);
-		putIntoSuperObject(classGroup);
-		objectStack.push(classGroup);
-		stackedObjects++;
-	}
+        if (distanceNode != null) {
+            Integer[] minmax = parseDistance(distanceNode);
+            LinkedHashMap<String, Object> distance = 
+                    CqlfObjectGenerator.makeDistance("w", minmax[0], minmax[1]);
+            sequence.put("inOrder", true);
+            ArrayList<Object> distances = new ArrayList<Object>();
+            distances.add(distance);
+            sequence.put("distances", distances);
+            // don't re-visit the emptyTokenSequence node
+            visited.add(distanceNode.getChild(0));
+        }
+        putIntoSuperObject(sequence);
+        objectStack.push(sequence);
+        stackedObjects++;
+    }
 
-	private void processToken(ParseTree node) {
-		LinkedHashMap<String,Object> token = CqlfObjectGenerator.makeToken();
-		// handle negation
-		List<ParseTree> negations = getChildrenWithCat(node, "!");
-		boolean negated = false;
-		boolean isRegex = false;
-		if (negations.size() % 2 == 1) negated = true;
-		if (getNodeCat(node.getChild(0)).equals("key")) {
-			// no 'term' child, but direct key specification: process here
-			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
+    @SuppressWarnings("unchecked")
+    /**
+     * empty tokens at beginning/end of sequence
+     * @param node
+     */
+    private void processEmptyTokenSequence(ParseTree node) {
+        Integer[] minmax = parseEmptySegments(node);
+        // object will be either a repetition group or a single empty
+        // token
+        LinkedHashMap<String, Object> object;
+        LinkedHashMap<String, Object> emptyToken = 
+                CqlfObjectGenerator.makeToken();
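+        // wrap in a repetition group unless exactly one empty token
+        // ({1,1}) is required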
+        if (minmax[0] != 1 || minmax[1] == null || minmax[1] != 1) {
+            object = CqlfObjectGenerator.makeRepetition(minmax[0], minmax[1]);
+            ((ArrayList<Object>) object.get("operands")).add(emptyToken);
+        }
+        else {
+            object = emptyToken;
+        }
+        putIntoSuperObject(object);
+        objectStack.push(object);
+        stackedObjects++;
+    }
 
-			String key = node.getChild(0).getText();
-			if (getNodeCat(node.getChild(0).getChild(0)).equals("regex")) {
-				isRegex = true;
-				term.put("type", "type:regex");
-				key = key.substring(1,key.length()-1);
-			}
-			term.put("layer", "orth");
-			term.put("key", key);
-			String matches = negated ? "ne" : "eq";
-			term.put("match", "match:"+matches);
-			ParseTree flagNode = getFirstChildWithCat(node, "flag");
-			if (flagNode != null) {
-				// substring removes leading slash '/'
-				String flag = getNodeCat(flagNode.getChild(0)).substring(1);
-				if (flag.contains("i")) term.put("caseInsensitive", true);
-				else if (flag.contains("I")) term.put("caseInsensitive", false);
-				if (flag.contains("x")) {
-					term.put("type", "type:regex");
-					if (!isRegex) {
-						key = QueryUtils.escapeRegexSpecialChars(key); 
-					}
-					term.put("key", ".*?"+key+".*?"); // overwrite key
-				}
-			}
-			token.put("wrap", term);
-		} else {
-			// child is 'term' or 'termGroup' -> process in extra method 
-			LinkedHashMap<String,Object> termOrTermGroup = 
-					parseTermOrTermGroup(node.getChild(1), negated);
-			token.put("wrap", termOrTermGroup);
-		}
-		putIntoSuperObject(token);
-		visited.add(node.getChild(0));
-		visited.add(node.getChild(2));
-	}
-	
+    private void processEmptyTokenSequenceClass(ParseTree node) {
+        int classId = 1;
+        if (hasChild(node, "spanclass_id")) {
+            classId = Integer.parseInt(node.getChild(1).getChild(0)
+                    .toStringTree(parser));
+        }
+        LinkedHashMap<String, Object> classGroup = 
+                CqlfObjectGenerator.makeSpanClass(classId, false);
+        putIntoSuperObject(classGroup);
+        objectStack.push(classGroup);
+        stackedObjects++;
+    }
 
-	@SuppressWarnings("unchecked")
-	private void processAlignment(ParseTree node) {
-		LinkedHashMap<String,Object> alignClass = CqlfObjectGenerator.makeSpanClass(++classCounter,false);
-		LinkedHashMap<String,Object> metaMap = (LinkedHashMap<String, Object>) requestMap.get("meta");
-		if (metaMap.containsKey("alignment")) {
-			ArrayList<Integer> alignedClasses = new ArrayList<Integer>();
-			try {
-				alignedClasses = (ArrayList<Integer>) metaMap.get("alignment"); 
-			} catch (ClassCastException cce) {
-				alignedClasses.add((Integer) metaMap.get("alignment"));
-			}
-			alignedClasses.add(classCounter);
-			metaMap.put("alignment", alignedClasses);
-		} else {
-			metaMap.put("alignment", classCounter);
-		}
+    private void processToken(ParseTree node) {
+        LinkedHashMap<String, Object> token = CqlfObjectGenerator.makeToken();
+        // handle negation
+        List<ParseTree> negations = getChildrenWithCat(node, "!");
+        boolean negated = false;
+        boolean isRegex = false;
+        if (negations.size() % 2 == 1)
+            negated = true;
+        if (getNodeCat(node.getChild(0)).equals("key")) {
+            // no 'term' child, but direct key specification: process here
+            LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
+            String key = node.getChild(0).getText();
+            if (getNodeCat(node.getChild(0).getChild(0)).equals("regex")) {
+                isRegex = true;
+                term.put("type", "type:regex");
+                key = key.substring(1, key.length() - 1);
+            }
+            term.put("layer", "orth");
+            term.put("key", key);
+            String matches = negated ? "ne" : "eq";
+            term.put("match", "match:" + matches);
+            ParseTree flagNode = getFirstChildWithCat(node, "flag");
+            if (flagNode != null) {
+                // substring removes leading slash '/'
+                String flag = getNodeCat(flagNode.getChild(0)).substring(1);
+                if (flag.contains("i"))
+                    term.put("caseInsensitive", true);
+                else if (flag.contains("I"))
+                    term.put("caseInsensitive", false);
+                if (flag.contains("x")) {
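+                    // flag 'x' allows submatches: treat the key as a
+                    // regex padded with '.*?'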
+                    term.put("type", "type:regex");
+                    if (!isRegex) {
+                        key = QueryUtils.escapeRegexSpecialChars(key);
+                    }
+                    // overwrite key
+                    term.put("key", ".*?" + key + ".*?"); 
+                }
+            }
+            token.put("wrap", term);
+        }
+        else {
+            // child is 'term' or 'termGroup' -> process in extra method
+            LinkedHashMap<String, Object> termOrTermGroup = 
+                    parseTermOrTermGroup(node.getChild(1), negated);
+            token.put("wrap", termOrTermGroup);
+        }
+        putIntoSuperObject(token);
+        visited.add(node.getChild(0));
+        visited.add(node.getChild(2));
+    }
 
-		putIntoSuperObject(alignClass);
-		objectStack.push(alignClass);
-		stackedObjects++;
-	}
+    @SuppressWarnings("unchecked")
+    private void processAlignment(ParseTree node) {
+        LinkedHashMap<String, Object> alignClass = 
+                CqlfObjectGenerator.makeSpanClass(++classCounter, false);
+        LinkedHashMap<String, Object> metaMap = 
+                (LinkedHashMap<String, Object>) requestMap.get("meta");
+        if (metaMap.containsKey("alignment")) {
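+            // "alignment" may hold a single class id or already a list
+            // of ids; normalise to a list and append the new class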
+            ArrayList<Integer> alignedClasses = new ArrayList<Integer>();
+            try {
+                alignedClasses = (ArrayList<Integer>) metaMap.get("alignment");
+            }
+            catch (ClassCastException cce) {
+                alignedClasses.add((Integer) metaMap.get("alignment"));
+            }
+            alignedClasses.add(classCounter);
+            metaMap.put("alignment", alignedClasses);
+        }
+        else {
+            metaMap.put("alignment", classCounter);
+        }
 
-	private void processSpan(ParseTree node) {
-		List<ParseTree> negations = getChildrenWithCat(node, "!");
-		boolean negated = false;
-		if (negations.size() % 2 == 1) negated = true;
-		LinkedHashMap<String,Object> span = CqlfObjectGenerator.makeSpan();
-		ParseTree keyNode = getFirstChildWithCat(node, "key");
-		ParseTree layerNode = getFirstChildWithCat(node, "layer");
-		ParseTree foundryNode = getFirstChildWithCat(node, "foundry");
-		ParseTree termOpNode = getFirstChildWithCat(node, "termOp");
-		ParseTree termNode = getFirstChildWithCat(node, "term");
-		ParseTree termGroupNode = getFirstChildWithCat(node, "termGroup");
-		if (foundryNode != null) span.put("foundry", foundryNode.getText());
-		if (layerNode != null) {
-			String layer = layerNode.getText();
-			if (layer.equals("base")) layer="lemma";
-			span.put("layer", layer);
-		}
-		span.put("key", keyNode.getText());
-		if (termOpNode != null) {
-			String termOp = termOpNode.getText();
-			if (termOp.equals("==")) span.put("match", "match:eq");
-			else if (termOp.equals("!=")) span.put("match", "match:ne");
-		}
-		if (termNode != null) {
-			LinkedHashMap<String,Object> termOrTermGroup = 
-					parseTermOrTermGroup(termNode, negated, "span");
-			span.put("attr", termOrTermGroup);
-		}
-		if (termGroupNode != null) {
-			LinkedHashMap<String,Object> termOrTermGroup = 
-					parseTermOrTermGroup(termGroupNode, negated, "span");
-			span.put("attr", termOrTermGroup);
-		}
-		putIntoSuperObject(span);
-		objectStack.push(span);
-		stackedObjects++;
-	}
+        putIntoSuperObject(alignClass);
+        objectStack.push(alignClass);
+        stackedObjects++;
+    }
 
-	private void processDisjunction(ParseTree node) {
-		LinkedHashMap<String,Object> disjunction = CqlfObjectGenerator.makeGroup("or");
-		putIntoSuperObject(disjunction);
-		objectStack.push(disjunction);
-		stackedObjects++;
-	}
+    private void processSpan(ParseTree node) {
+        List<ParseTree> negations = getChildrenWithCat(node, "!");
+        boolean negated = false;
+        if (negations.size() % 2 == 1)
+            negated = true;
+        LinkedHashMap<String, Object> span = CqlfObjectGenerator.makeSpan();
+        ParseTree keyNode = getFirstChildWithCat(node, "key");
+        ParseTree layerNode = getFirstChildWithCat(node, "layer");
+        ParseTree foundryNode = getFirstChildWithCat(node, "foundry");
+        ParseTree termOpNode = getFirstChildWithCat(node, "termOp");
+        ParseTree termNode = getFirstChildWithCat(node, "term");
+        ParseTree termGroupNode = getFirstChildWithCat(node, "termGroup");
+        if (foundryNode != null)
+            span.put("foundry", foundryNode.getText());
+        if (layerNode != null) {
+            String layer = layerNode.getText();
+            if (layer.equals("base"))
+                layer = "lemma";
+            span.put("layer", layer);
+        }
+        span.put("key", keyNode.getText());
+        if (termOpNode != null) {
+            String termOp = termOpNode.getText();
+            if (termOp.equals("=="))
+                span.put("match", "match:eq");
+            else if (termOp.equals("!="))
+                span.put("match", "match:ne");
+        }
+        if (termNode != null) {
+            LinkedHashMap<String, Object> termOrTermGroup = 
+                    parseTermOrTermGroup(termNode, negated, "span");
+            span.put("attr", termOrTermGroup);
+        }
+        if (termGroupNode != null) {
+            LinkedHashMap<String, Object> termOrTermGroup = 
+                    parseTermOrTermGroup(termGroupNode, negated, "span");
+            span.put("attr", termOrTermGroup);
+        }
+        putIntoSuperObject(span);
+        objectStack.push(span);
+        stackedObjects++;
+    }
 
-	private void processPosition(ParseTree node) {
-		LinkedHashMap<String,Object> position = parseFrame(node.getChild(0));
-		putIntoSuperObject(position);
-		objectStack.push(position);
-		stackedObjects++;
-	}
+    private void processDisjunction(ParseTree node) {
+        LinkedHashMap<String, Object> disjunction = 
+                CqlfObjectGenerator.makeGroup("or");
+        putIntoSuperObject(disjunction);
+        objectStack.push(disjunction);
+        stackedObjects++;
+    }
 
-	private void processRelation(ParseTree node) {
-		LinkedHashMap<String, Object> relationGroup = CqlfObjectGenerator.makeGroup("relation");
-		LinkedHashMap<String, Object> relation = CqlfObjectGenerator.makeRelation();
-		relationGroup.put("relation", relation);
-		if (node.getChild(0).getText().equals("dominates")) {
-			relation.put("layer", "c");
-		}
-		ParseTree relSpec = getFirstChildWithCat(node, "relSpec");
-		ParseTree repetition = getFirstChildWithCat(node, "repetition");
-		if (relSpec != null) {
-			ParseTree foundry = getFirstChildWithCat(relSpec, "foundry");
-			ParseTree layer = getFirstChildWithCat(relSpec, "layer");
-			ParseTree key = getFirstChildWithCat(relSpec, "key");
-			if (foundry != null) relation.put("foundry", foundry.getText());
-			if (layer != null) relation.put("layer", layer.getText());
-			if (key != null) relation.put("key", key.getText());
-		}
-		if (repetition != null) {
-			Integer[] minmax =  parseRepetition(repetition);
-			relation.put("boundary", CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
-		}
-		putIntoSuperObject(relationGroup);
-		objectStack.push(relationGroup);
-		stackedObjects++;
-	}
+    private void processPosition(ParseTree node) {
+        LinkedHashMap<String, Object> position = parseFrame(node.getChild(0));
+        putIntoSuperObject(position);
+        objectStack.push(position);
+        stackedObjects++;
+    }
 
-	private void processSpanclass(ParseTree node) {
-		// Step I: get info
-		int classId = 1;
-		if (getNodeCat(node.getChild(1)).equals("spanclass_id")) {
-			String ref = node.getChild(1).getChild(0).toStringTree(parser);
-			try {
-				classId = Integer.parseInt(ref);
-			} catch (NumberFormatException e) {
-				String msg = "The specified class reference in the " +
-						"focus/split-Operator is not a number: " + ref;
-				log.error(msg);
-				addError(StatusCodes.UNDEFINED_CLASS_REFERENCE, msg);
-			}
-			// only allow class id up to 127
-			if (classId > 127) {
-				addWarning("Only class IDs up to 127 are allowed. Your class "+classId+" has been set back to 127. "
-						+ "Check for possible conflict with other classes.");
-				classId = 127;
-			}
-		}
-		LinkedHashMap<String, Object> classGroup = CqlfObjectGenerator.makeSpanClass(classId, false);
-		putIntoSuperObject(classGroup);
-		objectStack.push(classGroup);
-		stackedObjects++;
-		
-	}
+    private void processRelation(ParseTree node) {
+        LinkedHashMap<String, Object> relationGroup = 
+                CqlfObjectGenerator.makeGroup("relation");
+        LinkedHashMap<String, Object> relation = 
+                CqlfObjectGenerator.makeRelation();
+        relationGroup.put("relation", relation);
+        if (node.getChild(0).getText().equals("dominates")) {
+            relation.put("layer", "c");
+        }
+        ParseTree relSpec = getFirstChildWithCat(node, "relSpec");
+        ParseTree repetition = getFirstChildWithCat(node, "repetition");
+        if (relSpec != null) {
+            ParseTree foundry = getFirstChildWithCat(relSpec, "foundry");
+            ParseTree layer = getFirstChildWithCat(relSpec, "layer");
+            ParseTree key = getFirstChildWithCat(relSpec, "key");
+            if (foundry != null)
+                relation.put("foundry", foundry.getText());
+            if (layer != null)
+                relation.put("layer", layer.getText());
+            if (key != null)
+                relation.put("key", key.getText());
+        }
+        if (repetition != null) {
+            Integer[] minmax = parseRepetition(repetition);
+            relation.put("boundary",
+                    CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
+        }
+        putIntoSuperObject(relationGroup);
+        objectStack.push(relationGroup);
+        stackedObjects++;
+    }
 
-	private void processMatching(ParseTree node) {
-		// Step I: get info
-		ArrayList<Integer> classRefs = new ArrayList<Integer>();
-		String classRefOp = null;
-		if (getNodeCat(node.getChild(2)).equals("spanclass_id")) {
-			ParseTree spanNode = node.getChild(2);
-			for (int i = 0; i < spanNode.getChildCount() - 1; i++) {
-				String ref = spanNode.getChild(i).getText();
-				if (ref.equals("|") || ref.equals("&")) {
-					classRefOp = ref.equals("|") ? "intersection" : "union";
-				} else {
-					try {
-						int classRef = Integer.parseInt(ref);
-						// only allow class id up to 127
-						if (classRef > 127) {
-							addWarning("Only class references up to 127 are allowed. Your reference to class "+classRef+" has been set back to 127. "
-									+ "Check for possible conflict with other classes.");
-							classRef = 127;
-						}
-						classRefs.add(classRef);
-					} catch (NumberFormatException e) {
-						String err = "The specified class reference in the " +
-								"shrink/split-Operator is not a number.";
-						addError(StatusCodes.UNDEFINED_CLASS_REFERENCE, err);
-					}
-				}
-			}
-		} else {
-			classRefs.add(1);
-		}
-		LinkedHashMap<String, Object> referenceGroup = CqlfObjectGenerator.makeReference(classRefs);
+    private void processSpanclass(ParseTree node) {
+        // Step I: get info
+        int classId = 1;
+        if (getNodeCat(node.getChild(1)).equals("spanclass_id")) {
+            String ref = node.getChild(1).getChild(0).toStringTree(parser);
+            try {
+                classId = Integer.parseInt(ref);
+            }
+            catch (NumberFormatException e) {
+                String msg = "The specified class reference in the "
+                        + "focus/split-Operator is not a number: " + ref;
+                log.error(msg);
+                addError(StatusCodes.UNDEFINED_CLASS_REFERENCE, msg);
+            }
+            // only allow class id up to 127
+            if (classId > 127) {
+                addWarning("Only class IDs up to 127 are allowed. Your class "
+                        + classId + " has been set back to 127. "
+                        + "Check for possible conflict with other classes.");
+                classId = 127;
+            }
+        }
+        LinkedHashMap<String, Object> classGroup = 
+                CqlfObjectGenerator.makeSpanClass(classId, false);
+        putIntoSuperObject(classGroup);
+        objectStack.push(classGroup);
+        stackedObjects++;
 
-		String type = node.getChild(0).toStringTree(parser);
-		// Default is focus(), if deviating catch here
-		if (type.equals("split")) referenceGroup.put("operation", "operation:split");
-		if (type.equals("submatch") || type.equals("shrink")) {
-			String warning = "Deprecated 2014-07-24: "+type + "() as a match reducer " +
-					"to a specific class is deprecated in favor of focus() and will " +
-					"only be supported for 3 months after deprecation date.";
-			addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, warning);
-		}
-		if (classRefOp != null) {
-			referenceGroup.put("classRefOp", "classRefOp:" + classRefOp);
-		}
-		ArrayList<Object> referenceOperands = new ArrayList<Object>();
-		referenceGroup.put("operands", referenceOperands);
-		// Step II: decide where to put the group
-		putIntoSuperObject(referenceGroup);
-		objectStack.push(referenceGroup);
-		stackedObjects++;
-		visited.add(node.getChild(0));
-	}
+    }
 
-	private void processSubmatch(ParseTree node) {
-		LinkedHashMap<String,Object> submatch = CqlfObjectGenerator.makeReference(null);
-		submatch.put("operands", new ArrayList<Object>());
-		ParseTree startpos = getFirstChildWithCat(node,"startpos");
-		ParseTree length = getFirstChildWithCat(node,"length");
-		ArrayList<Integer> spanRef = new ArrayList<Integer>();
-		spanRef.add(Integer.parseInt(startpos.getText()));
-		if (length != null) {
-			spanRef.add(Integer.parseInt(length.getText()));
-		}
-		submatch.put("spanRef", spanRef);
-		putIntoSuperObject(submatch);
-		objectStack.push(submatch);
-		stackedObjects++;
-		visited.add(node.getChild(0));
-	}
+    private void processMatching(ParseTree node) {
+        // Step I: get info
+        ArrayList<Integer> classRefs = new ArrayList<Integer>();
+        String classRefOp = null;
+        if (getNodeCat(node.getChild(2)).equals("spanclass_id")) {
+            ParseTree spanNode = node.getChild(2);
+            for (int i = 0; i < spanNode.getChildCount() - 1; i++) {
+                String ref = spanNode.getChild(i).getText();
+                if (ref.equals("|") || ref.equals("&")) {
+                    classRefOp = ref.equals("|") ? "intersection" : "union";
+                }
+                else {
+                    try {
+                        int classRef = Integer.parseInt(ref);
+                        // only allow class id up to 127
+                        if (classRef > 127) {
+                            addWarning("Only class references up to 127 are "
+                                    + "allowed. Your reference to class "
+                                    + classRef + " has been set back to 127. "
+                                    + "Check for possible conflict with "
+                                    + "other classes.");
+                            classRef = 127;
+                        }
+                        classRefs.add(classRef);
+                    }
+                    catch (NumberFormatException e) {
+                        String err = "The specified class reference in the "
+                                + "shrink/split-Operator is not a number.";
+                        addError(StatusCodes.UNDEFINED_CLASS_REFERENCE, err);
+                    }
+                }
+            }
+        }
+        else {
+            classRefs.add(1);
+        }
+        LinkedHashMap<String, Object> referenceGroup = 
+                CqlfObjectGenerator.makeReference(classRefs);
 
-	/**
-	 * Creates meta field in requestMap, later filled by terms
-	 * @param node
-	 */
-	private void processMeta(ParseTree node) {
-		LinkedHashMap<String, Object> metaFilter = new LinkedHashMap<String, Object>();
-		requestMap.put("meta", metaFilter);
-		metaFilter.put("@type", "korap:meta");
-	}
+        String type = node.getChild(0).toStringTree(parser);
+        // Default is focus(); handle deviating types (split, submatch/shrink) here
+        if (type.equals("split"))
+            referenceGroup.put("operation", "operation:split");
+        if (type.equals("submatch") || type.equals("shrink")) {
+            String warning = "Deprecated 2014-07-24: "
+                    + type
+                    + "() as a match reducer "
+                    + "to a specific class is deprecated in favor of focus() and will "
+                    + "only be supported for 3 months after deprecation date.";
+            addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, warning);
+        }
+        if (classRefOp != null) {
+            referenceGroup.put("classRefOp", "classRefOp:" + classRefOp);
+        }
+        ArrayList<Object> referenceOperands = new ArrayList<Object>();
+        referenceGroup.put("operands", referenceOperands);
+        // Step II: decide where to put the group
+        putIntoSuperObject(referenceGroup);
+        objectStack.push(referenceGroup);
+        stackedObjects++;
+        visited.add(node.getChild(0));
+    }
 
-	/**
-	 * NB: requires that parent is not 'position'!
-	 * @param node
-	 */
-	private void processWithin(ParseTree node) {
-		ParseTree domainNode = node.getChild(2);
-		String domain = getNodeCat(domainNode);
-		LinkedHashMap<String, Object> curObject = 
-				(LinkedHashMap<String, Object>) objectStack.getFirst();
-		curObject.put("within", domain);
-		visited.add(node.getChild(0));
-		visited.add(node.getChild(1));
-		visited.add(domainNode);
-	}
+    private void processSubmatch(ParseTree node) {
+        LinkedHashMap<String, Object> submatch = 
+                CqlfObjectGenerator.makeReference(null);
+        submatch.put("operands", new ArrayList<Object>());
+        ParseTree startpos = getFirstChildWithCat(node, "startpos");
+        ParseTree length = getFirstChildWithCat(node, "length");
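+        // spanRef holds the start position and, if given, the length
+        // of the submatch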
+        ArrayList<Integer> spanRef = new ArrayList<Integer>();
+        spanRef.add(Integer.parseInt(startpos.getText()));
+        if (length != null) {
+            spanRef.add(Integer.parseInt(length.getText()));
+        }
+        submatch.put("spanRef", spanRef);
+        putIntoSuperObject(submatch);
+        objectStack.push(submatch);
+        stackedObjects++;
+        visited.add(node.getChild(0));
+    }
 
-	/**
-	 * Parses a repetition node
-	 * @param node
-	 * @return A two-element array, of which the first element is an int representing 
-	 * the minimal number of repetitions of the quantified element, and the second 
-	 * element representing the maximal number of repetitions 
-	 */
-	private Integer[] parseRepetition(ParseTree node) {
-		Integer min = 0, max = 0;
-		boolean maxInfinite = false;
-		// (repetition) node can be of two types: 'kleene' or 'range'
-		ParseTree repetitionTypeNode = node.getChild(0);
-		String repetitionType = getNodeCat(repetitionTypeNode);
-		if (repetitionType.equals("kleene")) {
-			// kleene operators (+ and *) as well as optionality (?)
-			String kleeneOp = repetitionTypeNode.getText();
-			if (kleeneOp.equals("*")) {
-				maxInfinite = true;
-			} else if (kleeneOp.equals("+")) {
-				min = 1;
-				maxInfinite = true;
-			} if (kleeneOp.equals("?")) {
-				max = 1;
-			}
-		} else {
-			// Range node of form "{ min , max }" or "{ max }" or "{ , max }"  or "{ min , }"
-			ParseTree minNode = getFirstChildWithCat(repetitionTypeNode, "min");
-			ParseTree maxNode = getFirstChildWithCat(repetitionTypeNode, "max");
-			if (maxNode!=null) max = Integer.parseInt(maxNode.getText());
-			else maxInfinite = true;
-			// min is optional: if not specified, min = max
-			if (minNode!=null) min = Integer.parseInt(minNode.getText());
-			else if (hasChild(repetitionTypeNode, ",")) min = 0;
-			else {
-				min = max;
-//				warnings.add("Your query contains a segment of the form {n}, where n is some number. This expression is ambiguous. "
-//						+ "It could mean a repetition (\"Repeat the previous element n times!\") or a word form that equals the number, "
-//						+ "enclosed by a \"class\" (which is denoted by braces like '{x}', see the documentation on classes)."
-//						+ "KorAP has by default interpreted the segment as a repetition statement. If you want to express the"
-//						+ "number as a word form inside a class, use the non-shorthand form {[orth=n]}.");
-			}
-		}
-		if (maxInfinite) {
-			max = null;
-		}
-		return new Integer[]{min,max};
-	}
+    /**
+     * Creates meta field in requestMap, later filled by terms
+     * 
+     * @param node
+     */
+    private void processMeta(ParseTree node) {
+        LinkedHashMap<String, Object> metaFilter = new LinkedHashMap<String, Object>();
+        requestMap.put("meta", metaFilter);
+        metaFilter.put("@type", "korap:meta");
+    }
 
-	private LinkedHashMap<String,Object> parseFrame(ParseTree node) {
-		String operator = node.toStringTree(parser).toLowerCase();
-		String[] frames = new String[]{""};
-		String[] classRefCheck = new String[]{"classRefCheck:includes"};
-		switch (operator) {
-		case "contains":
-			frames = new String[]{"frames:contains"};
-			break;
-		case "matches":
-			frames = new String[]{"frames:matches"};
-			break;
-		case "startswith":
-			frames = new String[]{"frames:startswith"};
-			break;
-		case "endswith":
-			frames = new String[]{"frames:endswith"};
-			break;	
-		case "overlaps":
-			frames = new String[]{"frames:overlapsLeft","frames:overlapsRight"};
-			classRefCheck = new String[]{"classRefCheck:intersects"};
-			break;
-		}
-		return CqlfObjectGenerator.makePosition(frames,classRefCheck);
-	}
+    /**
+     * NB: requires that parent is not 'position'!
+     * 
+     * @param node
+     */
+    private void processWithin(ParseTree node) {
+        ParseTree domainNode = node.getChild(2);
+        String domain = getNodeCat(domainNode);
+        LinkedHashMap<String, Object> curObject = (LinkedHashMap<String, Object>) objectStack
+                .getFirst();
+        curObject.put("within", domain);
+        visited.add(node.getChild(0));
+        visited.add(node.getChild(1));
+        visited.add(domainNode);
+    }
 
+    /**
+     * Parses a repetition node
+     * 
+     * @param node
+     * @return A two-element array, of which the first element is an
+     *         int representing the minimal number of repetitions of
+     *         the quantified element, and the second element
+     *         representing the maximal number of repetitions
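+     *         (null standing for an unbounded maximum). For example,
+     *         '*' yields [0, null], '+' yields [1, null], '?' yields
+     *         [0, 1], and a range such as {2,5} yields [2, 5].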
+     */
+    private Integer[] parseRepetition(ParseTree node) {
+        Integer min = 0, max = 0;
+        boolean maxInfinite = false;
+        // (repetition) node can be of two types: 'kleene' or 'range'
+        ParseTree repetitionTypeNode = node.getChild(0);
+        String repetitionType = getNodeCat(repetitionTypeNode);
+        if (repetitionType.equals("kleene")) {
+            // kleene operators (+ and *) as well as optionality (?)
+            String kleeneOp = repetitionTypeNode.getText();
+            if (kleeneOp.equals("*")) {
+                maxInfinite = true;
+            }
+            else if (kleeneOp.equals("+")) {
+                min = 1;
+                maxInfinite = true;
+            }
+            if (kleeneOp.equals("?")) {
+                max = 1;
+            }
+        }
+        else {
+            // Range node of form "{ min , max }" or "{ max }" or
+            // "{ , max }" or "{ min , }"
+            ParseTree minNode = getFirstChildWithCat(repetitionTypeNode, "min");
+            ParseTree maxNode = getFirstChildWithCat(repetitionTypeNode, "max");
+            if (maxNode != null)
+                max = Integer.parseInt(maxNode.getText());
+            else
+                maxInfinite = true;
+            // min is optional: if not specified, min = max
+            if (minNode != null)
+                min = Integer.parseInt(minNode.getText());
+            else if (hasChild(repetitionTypeNode, ","))
+                min = 0;
+            else {
+                min = max;
+//                addWarning("Your query contains a segment of the form {n}, where n is some number. This expression is ambiguous. "
+//                        + "It could mean a repetition (\"Repeat the previous element n times!\") or a word form that equals the number, "
+//                        + "enclosed by a \"class\" (which is denoted by braces like '{x}', see the documentation on classes)."
+//                        + "KorAP has by default interpreted the segment as a repetition statement. If you want to express the"
+//                        + "number as a word form inside a class, use the non-shorthand form {[orth=n]}.");
+            }
+        }
+        if (maxInfinite) {
+            max = null;
+        }
+        return new Integer[] { min, max };
+    }
 
-	private LinkedHashMap<String, Object> parseTermOrTermGroup(
-			ParseTree node, boolean negated) {
-		return parseTermOrTermGroup(node, negated, "token");
-	}
+    private LinkedHashMap<String, Object> parseFrame(ParseTree node) {
+        String operator = node.toStringTree(parser).toLowerCase();
+        String[] frames = new String[] { "" };
+        String[] classRefCheck = new String[] { "classRefCheck:includes" };
+        switch (operator) {
+            case "contains":
+                frames = new String[] { "frames:contains" };
+                break;
+            case "matches":
+                frames = new String[] { "frames:matches" };
+                break;
+            case "startswith":
+                frames = new String[] { "frames:startswith" };
+                break;
+            case "endswith":
+                frames = new String[] { "frames:endswith" };
+                break;
+            case "overlaps":
+                frames = new String[] { "frames:overlapsLeft",
+                        "frames:overlapsRight" };
+                classRefCheck = new String[] { "classRefCheck:intersects" };
+                break;
+        }
+        return CqlfObjectGenerator.makePosition(frames, classRefCheck);
+    }
 
-	/**
-	 * Parses a (term) or (termGroup) node
-	 * @param node
-	 * @param negatedGlobal Indicates whether the term/termGroup is globally negated, e.g. through a negation 
-	 * operator preceding the related token like "![base=foo]". Global negation affects the term's "match" parameter.
-	 * @return A term or termGroup object, depending on input
-	 */
-	@SuppressWarnings("unchecked")
-	private LinkedHashMap<String, Object> parseTermOrTermGroup(ParseTree node, boolean negatedGlobal, String mode) {
-		String nodeCat = getNodeCat(node);
-		if (nodeCat.equals("term")) {
-			String key = null;
-			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
-			// handle negation
-			boolean negated = negatedGlobal;
-			boolean isRegex = false;
-			List<ParseTree> negations = getChildrenWithCat(node, "!");
-			if (negations.size() % 2 == 1) negated = !negated;
-			// retrieve possible nodes
-			ParseTree keyNode = getFirstChildWithCat(node, "key");
-			ParseTree valueNode = getFirstChildWithCat(node, "value");
-			ParseTree layerNode = getFirstChildWithCat(node, "layer");
-			ParseTree foundryNode = getFirstChildWithCat(node, "foundry");
-			ParseTree termOpNode = getFirstChildWithCat(node, "termOp");
-			ParseTree flagNode = getFirstChildWithCat(node, "flag");
-			// process foundry
-			if (foundryNode != null) term.put("foundry", foundryNode.getText());
-			// process layer: map "base" -> "lemma"
-			if (layerNode != null) {
-				String layer = layerNode.getText();
-				if (layer.equals("base")) layer="lemma";
-				if (mode.equals("span")) term.put("key", layer);
-				else term.put("layer", layer);
-			}
-			// process key: 'normal' or regex?
-			key = keyNode.getText();
-			if (getNodeCat(keyNode.getChild(0)).equals("regex")) {
-				isRegex = true;
-				term.put("type", "type:regex");
-				key = key.substring(1, key.length()-1); // remove leading and trailing quotes
-			}
-			if (mode.equals("span")) term.put("value", key);
-			else term.put("key", key);
-			// process value
-			if (valueNode != null) term.put("value", valueNode.getText());
-			// process operator ("match" property)
-			if (termOpNode != null) {
-				String termOp = termOpNode.getText();
-				negated = termOp.contains("!") ? !negated : negated; 
-				if (!negated) term.put("match", "match:eq");
-				else term.put("match", "match:ne");
-			}
-			// process possible flags
-			if (flagNode != null) {
-				String flag = getNodeCat(flagNode.getChild(0)).substring(1); //substring removes leading slash '/'
-				if (flag.contains("i")) term.put("caseInsensitive", true);
-				else if (flag.contains("I")) term.put("caseInsensitive", false);
-				if (flag.contains("x")) {
-					if (!isRegex) {
-						key = QueryUtils.escapeRegexSpecialChars(key); 
-					}
-					term.put("key", ".*?"+key+".*?");  // flag 'x' allows submatches: overwrite key with appended .*? 
-					term.put("type", "type:regex");
-				}
-			}
-			return term;
-		} else if (nodeCat.equals("termGroup")) {
-			// For termGroups, establish a boolean relation between operands and recursively call this function with
-			// the term or termGroup operands
-			LinkedHashMap<String,Object> termGroup = null;
-			ParseTree leftOp = null;
-			ParseTree rightOp = null;
-			// check for leading/trailing parantheses
-			if (!getNodeCat(node.getChild(0)).equals("(")) leftOp = node.getChild(0);
-			else leftOp = node.getChild(1);
-			if (!getNodeCat(node.getChild(node.getChildCount()-1)).equals(")")) rightOp = node.getChild(node.getChildCount()-1);
-			else rightOp = node.getChild(node.getChildCount()-2);
-			// establish boolean relation
-			ParseTree boolOp = getFirstChildWithCat(node, "boolOp"); 
-			String operator = boolOp.getText().equals("&") ? "and" : "or";
-			termGroup = CqlfObjectGenerator.makeTermGroup(operator);
-			ArrayList<Object> operands = (ArrayList<Object>) termGroup.get("operands");
-			// recursion with left/right operands
-			operands.add(parseTermOrTermGroup(leftOp, negatedGlobal, mode));
-			operands.add(parseTermOrTermGroup(rightOp, negatedGlobal, mode));
-			return termGroup;
-		}
-		return null;
-	}
+    private LinkedHashMap<String, Object> parseTermOrTermGroup(ParseTree node,
+            boolean negated) {
+        return parseTermOrTermGroup(node, negated, "token");
+    }
 
-	/**
-	 * Puts an object into the operands list of its governing (or "super") object which had been placed on the
-	 * {@link #objectStack} before and is still on top of the stack. If this is the top object of the tree, it is put there
-	 * instead of into some (non-existent) operand stack.
-	 * @param object The object to be inserted
-	 */
-	private void putIntoSuperObject(LinkedHashMap<String, Object> object) {
-		putIntoSuperObject(object, 0);
-	}
+    /**
+     * Parses a (term) or (termGroup) node.
+     * 
+     * @param node
+     *            The (term) or (termGroup) node to parse
+     * @param negatedGlobal
+     *            Indicates whether the term/termGroup is globally
+     *            negated, e.g. through a negation operator preceding
+     *            the related token like "![base=foo]". Global
+     *            negation affects the term's "match" parameter.
+     * @param mode
+     *            Either "token" or "span"; determines which keys the
+     *            layer and key are written to
+     * @return A term or termGroup object, depending on input
+     */
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<String, Object> parseTermOrTermGroup(ParseTree node,
+            boolean negatedGlobal, String mode) {
+        String nodeCat = getNodeCat(node);
+        if (nodeCat.equals("term")) {
+            String key = null;
+            LinkedHashMap<String, Object> term = 
+                    CqlfObjectGenerator.makeTerm();
+            // handle negation
+            boolean negated = negatedGlobal;
+            boolean isRegex = false;
+            List<ParseTree> negations = getChildrenWithCat(node, "!");
+            if (negations.size() % 2 == 1)
+                negated = !negated;
+            // retrieve possible nodes
+            ParseTree keyNode = getFirstChildWithCat(node, "key");
+            ParseTree valueNode = getFirstChildWithCat(node, "value");
+            ParseTree layerNode = getFirstChildWithCat(node, "layer");
+            ParseTree foundryNode = getFirstChildWithCat(node, "foundry");
+            ParseTree termOpNode = getFirstChildWithCat(node, "termOp");
+            ParseTree flagNode = getFirstChildWithCat(node, "flag");
+            // process foundry
+            if (foundryNode != null)
+                term.put("foundry", foundryNode.getText());
+            // process layer: map "base" -> "lemma"
+            if (layerNode != null) {
+                String layer = layerNode.getText();
+                if (layer.equals("base"))
+                    layer = "lemma";
+                if (mode.equals("span"))
+                    term.put("key", layer);
+                else
+                    term.put("layer", layer);
+            }
+            // process key: 'normal' or regex?
+            key = keyNode.getText();
+            if (getNodeCat(keyNode.getChild(0)).equals("regex")) {
+                isRegex = true;
+                term.put("type", "type:regex");
+                // remove leading and trailing quotes
+                key = key.substring(1, key.length() - 1);
+            }
+            if (mode.equals("span"))
+                term.put("value", key);
+            else
+                term.put("key", key);
+            // process value
+            if (valueNode != null)
+                term.put("value", valueNode.getText());
+            // process operator ("match" property)
+            if (termOpNode != null) {
+                String termOp = termOpNode.getText();
+                negated = termOp.contains("!") ? !negated : negated;
+                if (!negated)
+                    term.put("match", "match:eq");
+                else
+                    term.put("match", "match:ne");
+            }
+            // process possible flags
+            if (flagNode != null) {
+                // substring removes leading slash
+                String flag = getNodeCat(flagNode.getChild(0)).substring(1);
+                if (flag.contains("i"))
+                    term.put("caseInsensitive", true);
+                else if (flag.contains("I"))
+                    term.put("caseInsensitive", false);
+                if (flag.contains("x")) {
+                    if (!isRegex) {
+                        key = QueryUtils.escapeRegexSpecialChars(key);
+                    }
+                    // flag 'x' allows submatches: 
+                    // overwrite key with appended .*?
+                    term.put("key", ".*?" + key + ".*?"); // 
+                    term.put("type", "type:regex");
+                }
+            }
+            return term;
+        }
+        else if (nodeCat.equals("termGroup")) {
+            // For termGroups, establish a boolean relation between
+            // operands and recursively call this function with
+            // the term or termGroup operands
+            LinkedHashMap<String, Object> termGroup = null;
+            ParseTree leftOp = null;
+            ParseTree rightOp = null;
+            // check for leading/trailing parentheses
+            if (!getNodeCat(node.getChild(0)).equals("("))
+                leftOp = node.getChild(0);
+            else
+                leftOp = node.getChild(1);
+            if (!getNodeCat(node.getChild(node.getChildCount() - 1))
+                    .equals(")"))
+                rightOp = node.getChild(node.getChildCount() - 1);
+            else
+                rightOp = node.getChild(node.getChildCount() - 2);
+            // establish boolean relation
+            ParseTree boolOp = getFirstChildWithCat(node, "boolOp");
+            String operator = boolOp.getText().equals("&") ? "and" : "or";
+            termGroup = CqlfObjectGenerator.makeTermGroup(operator);
+            ArrayList<Object> operands = (ArrayList<Object>) termGroup
+                    .get("operands");
+            // recursion with left/right operands
+            operands.add(parseTermOrTermGroup(leftOp, negatedGlobal, mode));
+            operands.add(parseTermOrTermGroup(rightOp, negatedGlobal, mode));
+            return termGroup;
+        }
+        return null;
+    }
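For orientation, a term like mate/p=NN (the concrete PoliqarpPlus syntax is assumed here) would come out of this method, in token mode, with roughly the following entries; whatever CqlfObjectGenerator.makeTerm() itself pre-fills is not shown above and is therefore omitted:

    import java.util.LinkedHashMap;

    public class TermMapSketch {
        public static void main(String[] args) {
            // entries as parseTermOrTermGroup would put them for "mate/p=NN"
            // in token mode; the base entries from makeTerm() are omitted
            LinkedHashMap<String, Object> term =
                    new LinkedHashMap<String, Object>();
            term.put("foundry", "mate");
            term.put("layer", "p");
            term.put("key", "NN");
            term.put("match", "match:eq");
            System.out.println(term);
        }
    }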
 
-	/**
-	 * Puts an object into the operands list of its governing (or "super") object which had been placed on the
-	 * {@link #objectStack} before. If this is the top object of the tree, it is put there
-	 * instead of into some (non-existent) operand stack.
-	 * @param object The object to be inserted
-	 * @param objStackPosition Indicated the position of the super object on the {@link #objectStack} (in case not the top
-	 * element of the stack is the super object.
-	 */
-	@SuppressWarnings({ "unchecked" })
-	private void putIntoSuperObject(LinkedHashMap<String, Object> object, int objStackPosition) {
-		if (objectStack.size()>objStackPosition) {
-			ArrayList<Object> topObjectOperands = (ArrayList<Object>) objectStack.get(objStackPosition).get("operands");
-			topObjectOperands.add(object);
-		} else {
-			requestMap.put("query", object);
-		}
-	}
+    /**
+     * Puts an object into the operands list of its governing (or
+     * "super") object, which had been placed on the
+     * {@link #objectStack} before and is still on top of the stack.
+     * If the object is the top object of the tree (i.e. the stack is
+     * empty), it is written to the request map as the top-level
+     * query instead.
+     * 
+     * @param object
+     *            The object to be inserted
+     */
+    private void putIntoSuperObject(LinkedHashMap<String, Object> object) {
+        putIntoSuperObject(object, 0);
+    }
 
-	/**
-	 * Basically only increases the min and max counters as required by Poliqarp
-	 * @param distanceNode
-	 * @return
-	 */
-	private Integer[] parseDistance(ParseTree distanceNode) {
-		int emptyTokenSeqIndex = getNodeCat(distanceNode).equals("distance") ? 0 : 2; 
-		Integer[] minmax = parseEmptySegments(distanceNode.getChild(emptyTokenSeqIndex));
-		Integer min = minmax[0];
-		Integer max = minmax[1];
-		min++;
-		if (max != null) max++;
-		//		min = cropToMaxValue(min);
-		//		max = cropToMaxValue(max);
-		return new Integer[]{min, max};
-	}
+    /**
+     * Puts an object into the operands list of its governing (or
+     * "super") object, which had been placed on the
+     * {@link #objectStack} before. If the object is the top object
+     * of the tree, it is written to the request map as the top-level
+     * query instead.
+     * 
+     * @param object
+     *            The object to be inserted
+     * @param objStackPosition
+     *            Indicates the position of the super object on the
+     *            {@link #objectStack} (in case the super object is
+     *            not the top element of the stack).
+     */
+    @SuppressWarnings({ "unchecked" })
+    private void putIntoSuperObject(LinkedHashMap<String, Object> object,
+            int objStackPosition) {
+        if (objectStack.size() > objStackPosition) {
+            ArrayList<Object> topObjectOperands = (ArrayList<Object>) objectStack
+                    .get(objStackPosition).get("operands");
+            topObjectOperands.add(object);
+        }
+        else {
+            requestMap.put("query", object);
+        }
+    }
 
-	private Integer[] parseEmptySegments(ParseTree emptySegments) {
-		Integer min = 0;
-		Integer max = 0;
-		ParseTree child;
-		for (int i = 0; i < emptySegments.getChildCount(); i++) {
-			child = emptySegments.getChild(i);
-			ParseTree nextSibling = emptySegments.getChild(i + 1);
-			if (child.toStringTree(parser).equals("(emptyToken [ ])")) {
-				if (nextSibling != null && getNodeCat(nextSibling).equals("repetition")) {
-					Integer[] minmax = parseRepetition(nextSibling);
-					min += minmax[0];
-					if (minmax[1] != null) {
-						max += minmax[1];
-					} else {
-						max = null;
-					}
-				} else {
-					min++;
-					max++;
-				}
-			}
-		}
-		//		min = cropToMaxValue(min);
-		//		max = cropToMaxValue(max);
-		return new Integer[]{min, max};
-	}
+    /**
+     * Increases the min and max counters by one, as required by
+     * Poliqarp distance semantics.
+     * 
+     * @param distanceNode
+     *            The distance node from the parse tree
+     * @return A two-element array holding min and max; max is null
+     *         if the distance is unbounded
+     */
+    private Integer[] parseDistance(ParseTree distanceNode) {
+        int emptyTokenSeqIndex =
+                getNodeCat(distanceNode).equals("distance") ? 0 : 2;
+        Integer[] minmax = parseEmptySegments(distanceNode
+                .getChild(emptyTokenSeqIndex));
+        Integer min = minmax[0];
+        Integer max = minmax[1];
+        min++;
+        if (max != null)
+            max++;
+        // min = cropToMaxValue(min);
+        // max = cropToMaxValue(max);
+        return new Integer[] { min, max };
+    }
 
+    private Integer[] parseEmptySegments(ParseTree emptySegments) {
+        Integer min = 0;
+        Integer max = 0;
+        ParseTree child;
+        for (int i = 0; i < emptySegments.getChildCount(); i++) {
+            child = emptySegments.getChild(i);
+            ParseTree nextSibling = emptySegments.getChild(i + 1);
+            if (child.toStringTree(parser).equals("(emptyToken [ ])")) {
+                if (nextSibling != null
+                        && getNodeCat(nextSibling).equals("repetition")) {
+                    Integer[] minmax = parseRepetition(nextSibling);
+                    min += minmax[0];
+                    if (minmax[1] != null) {
+                        max += minmax[1];
+                    }
+                    else {
+                        max = null;
+                    }
+                }
+                else {
+                    min++;
+                    max++;
+                }
+            }
+        }
+        // min = cropToMaxValue(min);
+        // max = cropToMaxValue(max);
+        return new Integer[] { min, max };
+    }
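As a worked illustration of the two methods above (the parse-tree plumbing is omitted, and the exact gap syntax is an assumption): an empty-token gap such as []{2,4} between two tokens is counted as min=2, max=4 by parseEmptySegments, and parseDistance then increments both bounds:

    public class DistanceBoundsSketch {
        public static void main(String[] args) {
            // values as parseEmptySegments would count them for "[]{2,4}"
            Integer min = 2;
            Integer max = 4;
            // parseDistance increments both bounds; an unbounded (null) max
            // would stay null
            min++;
            if (max != null) max++;
            System.out.println("min=" + min + ", max=" + max); // min=3, max=5
        }
    }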
 
-	private ParserRuleContext parsePoliqarpQuery(String query) {
-		Lexer lexer = new PoliqarpPlusLexer((CharStream) null);
-		ParserRuleContext tree = null;
-		Antlr4DescriptiveErrorListener errorListener = new Antlr4DescriptiveErrorListener(query);
-		// Like p. 111
-		try {
-			// Tokenize input data
-			ANTLRInputStream input = new ANTLRInputStream(query);
-			lexer.setInputStream(input);
-			CommonTokenStream tokens = new CommonTokenStream(lexer);
-			parser = new PoliqarpPlusParser(tokens);
+    private ParserRuleContext parsePoliqarpQuery(String query) {
+        Lexer lexer = new PoliqarpPlusLexer((CharStream) null);
+        ParserRuleContext tree = null;
+        Antlr4DescriptiveErrorListener errorListener = 
+                new Antlr4DescriptiveErrorListener(query);
+        // Like p. 111
+        try {
+            // Tokenize input data
+            ANTLRInputStream input = new ANTLRInputStream(query);
+            lexer.setInputStream(input);
+            CommonTokenStream tokens = new CommonTokenStream(lexer);
+            parser = new PoliqarpPlusParser(tokens);
 
-			// Don't throw out erroneous stuff
-			parser.setErrorHandler(new BailErrorStrategy());
-			lexer.removeErrorListeners();
+            // Don't throw out erroneous stuff
+            parser.setErrorHandler(new BailErrorStrategy());
+            lexer.removeErrorListeners();
             lexer.addErrorListener(errorListener);
             parser.removeErrorListeners();
             parser.addErrorListener(errorListener);
 
-			// Get starting rule from parser
-			Method startRule = PoliqarpPlusParser.class.getMethod("request");
-			tree = (ParserRuleContext) startRule.invoke(parser, (Object[]) null);
-		}
-		// Some things went wrong ...
-		catch (Exception e) {
-			log.error("Could not parse query. Please make sure it is well-formed.");
-			log.error(errorListener.generateFullErrorMsg().toString());
-			addError(errorListener.generateFullErrorMsg());
-		}
-		return tree;
-	}
+            // Get starting rule from parser
+            Method startRule = PoliqarpPlusParser.class.getMethod("request");
+            tree = (ParserRuleContext) startRule
+                    .invoke(parser, (Object[]) null);
+        }
+        // Some things went wrong ...
+        catch (Exception e) {
+            log.error("Could not parse query. "
+                    + "Please make sure it is well-formed.");
+            log.error(errorListener.generateFullErrorMsg().toString());
+            addError(errorListener.generateFullErrorMsg());
+        }
+        return tree;
+    }
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java b/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java
index 9760854..e056c30 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java
@@ -19,22 +19,22 @@
  * @author bingel, hanl
  */
 public class QuerySerializer {
-	
-	public enum QueryLanguage {
-		POLIQARPPLUS, ANNIS, COSMAS2, CQL, CQP
-	}
-	
-	static HashMap<String, Class<? extends AbstractQueryProcessor>> qlProcessorAssignment;
 
-	static {
-		qlProcessorAssignment  = new HashMap<String, Class<? extends AbstractQueryProcessor>>();
-		qlProcessorAssignment.put("poliqarpplus", PoliqarpPlusQueryProcessor.class);
-		qlProcessorAssignment.put("cosmas2", Cosmas2QueryProcessor.class);
-		qlProcessorAssignment.put("annis", AnnisQueryProcessor.class);
-		qlProcessorAssignment.put("cql", CqlQueryProcessor.class);
-	}
-	
-	
+    public enum QueryLanguage {
+        POLIQARPPLUS, ANNIS, COSMAS2, CQL, CQP
+    }
+
+    static HashMap<String, Class<? extends AbstractQueryProcessor>> qlProcessorAssignment;
+
+    static {
+        qlProcessorAssignment = 
+                new HashMap<String, Class<? extends AbstractQueryProcessor>>();
+        qlProcessorAssignment.put("poliqarpplus", PoliqarpPlusQueryProcessor.class);
+        qlProcessorAssignment.put("cosmas2", Cosmas2QueryProcessor.class);
+        qlProcessorAssignment.put("annis", AnnisQueryProcessor.class);
+        qlProcessorAssignment.put("cql", CqlQueryProcessor.class);
+    }
+
     private Logger qllogger = KorAPLogger.initiate("ql");
     public static String queryLanguageVersion;
 
@@ -54,44 +54,55 @@
     public static void main(String[] args) {
         /*
          * just for testing...
-		 */
+         */
         QuerySerializer jg = new QuerySerializer();
         int i = 0;
         String[] queries;
         if (args.length == 0) {
-            queries = new String[]{
-            };
-        } else
-            queries = new String[]{args[0]};
-        
+            queries = new String[] {};
+        }
+        else
+            queries = new String[] { args[0] };
+
         for (String q : queries) {
             i++;
             try {
                 System.out.println(q);
                 String ql = "cosmas2";
                 jg.setCollection("pubDate=2014");
-                jg.run(q, ql, System.getProperty("user.home") + "/" + ql + "_" + i + ".jsonld");
+                jg.run(q, ql, System.getProperty("user.home") + "/" + ql + "_"
+                        + i + ".jsonld");
                 System.out.println();
-            } catch (NullPointerException npe) {
+            }
+            catch (NullPointerException npe) {
                 npe.printStackTrace();
                 System.out.println("null\n");
-            } catch (JsonGenerationException e) {
+            }
+            catch (JsonGenerationException e) {
                 e.printStackTrace();
-            } catch (JsonMappingException e) {
+            }
+            catch (JsonMappingException e) {
                 e.printStackTrace();
-            } catch (IOException e) {
+            }
+            catch (IOException e) {
                 e.printStackTrace();
             }
         }
     }
 
     /**
-     * Runs the QuerySerializer by initializing the relevant AbstractSyntaxTree implementation (depending on specified query language)
-     * and transforms and writes the tree's requestMap to the specified output file.
+     * Runs the QuerySerializer by initializing the relevant
+     * AbstractSyntaxTree implementation (depending on specified query
+     * language) and transforms and writes the tree's requestMap to
+     * the specified output file.
      *
-     * @param outFile       The file to which the serialization is written
-     * @param query         The query string
-     * @param queryLanguage The query language. As of 17 Dec 2014, this must be one of 'poliqarpplus', 'cosmas2', 'annis' or 'cql'. 
+     * @param outFile
+     *            The file to which the serialization is written
+     * @param query
+     *            The query string
+     * @param queryLanguage
+     *            The query language. As of 17 Dec 2014, this must be
+     *            one of 'poliqarpplus', 'cosmas2', 'annis' or 'cql'.
      * @throws IOException
      * @throws QueryException
      */
@@ -99,41 +110,56 @@
             throws IOException {
         if (queryLanguage.equalsIgnoreCase("poliqarp")) {
             ast = new PoliqarpPlusQueryProcessor(query);
-        } else if (queryLanguage.equalsIgnoreCase("cosmas2")) {
+        }
+        else if (queryLanguage.equalsIgnoreCase("cosmas2")) {
             ast = new Cosmas2QueryProcessor(query);
-        } else if (queryLanguage.equalsIgnoreCase("poliqarpplus")) {
+        }
+        else if (queryLanguage.equalsIgnoreCase("poliqarpplus")) {
             ast = new PoliqarpPlusQueryProcessor(query);
-        } else if (queryLanguage.equalsIgnoreCase("cql")) {
+        }
+        else if (queryLanguage.equalsIgnoreCase("cql")) {
             ast = new CqlQueryProcessor(query);
-        } else if (queryLanguage.equalsIgnoreCase("annis")) {
+        }
+        else if (queryLanguage.equalsIgnoreCase("annis")) {
             ast = new AnnisQueryProcessor(query);
-        } else {
-            throw new IllegalArgumentException(queryLanguage + " is not a supported query language!");
+        }
+        else {
+            throw new IllegalArgumentException(queryLanguage
+                    + " is not a supported query language!");
         }
         toJSON();
     }
 
     public QuerySerializer setQuery(String query, String ql, String version) {
-    	ast = new DummyQueryProcessor();
-    	if (query == null || query.isEmpty()) {
-			ast.addError(StatusCodes.NO_QUERY, "You did not specify a query!");
-		} else if (ql == null || ql.isEmpty()){
-            ast.addError(StatusCodes.NO_QUERY, "You did not specify any query language!");
-        } else if (ql.equalsIgnoreCase("poliqarp")) {
+        ast = new DummyQueryProcessor();
+        if (query == null || query.isEmpty()) {
+            ast.addError(StatusCodes.NO_QUERY, "You did not specify a query!");
+        }
+        else if (ql == null || ql.isEmpty()) {
+            ast.addError(StatusCodes.NO_QUERY,
+                    "You did not specify any query language!");
+        }
+        else if (ql.equalsIgnoreCase("poliqarp")) {
             ast = new PoliqarpPlusQueryProcessor(query);
-        } else if (ql.equalsIgnoreCase("cosmas2")) {
+        }
+        else if (ql.equalsIgnoreCase("cosmas2")) {
             ast = new Cosmas2QueryProcessor(query);
-        } else if (ql.equalsIgnoreCase("poliqarpplus")) {
+        }
+        else if (ql.equalsIgnoreCase("poliqarpplus")) {
             ast = new PoliqarpPlusQueryProcessor(query);
-        } else if (ql.equalsIgnoreCase("cql")) {
+        }
+        else if (ql.equalsIgnoreCase("cql")) {
             if (version == null)
                 ast = new CqlQueryProcessor(query);
             else
                 ast = new CqlQueryProcessor(query, version);
-        } else if (ql.equalsIgnoreCase("annis")) {
+        }
+        else if (ql.equalsIgnoreCase("annis")) {
             ast = new AnnisQueryProcessor(query);
-        } else {
-        	ast.addError(StatusCodes.UNKNOWN_QL, ql + " is not a supported query language!");
+        }
+        else {
+            ast.addError(StatusCodes.UNKNOWN_QL, ql
+                    + " is not a supported query language!");
         }
         return this;
     }
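A minimal caller of this class, sketched from the methods visible above (the query, language and output path are illustrative only):

    import java.io.IOException;
    import de.ids_mannheim.korap.query.serialize.QuerySerializer;

    public class SerializeExample {
        public static void main(String[] args) {
            QuerySerializer serializer = new QuerySerializer();
            serializer.setCollection("pubDate=2014");
            try {
                // picks PoliqarpPlusQueryProcessor for the language string
                // and writes the serialized request map to the given file
                serializer.run("[base=Haus]", "poliqarpplus",
                        System.getProperty("user.home") + "/example.jsonld");
            }
            catch (IOException e) {
                e.printStackTrace();
            }
        }
    }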
@@ -174,19 +200,16 @@
                 requestMap.put("warnings", warnings);
             }
             if (this.messages != null) {
-            	messages.addAll(this.messages);
+                messages.addAll(this.messages);
                 requestMap.put("messages", messages);
             }
-            
 
             return requestMap;
         }
         return new HashMap<>();
     }
 
-
-    public QuerySerializer addMeta(
-            String cli, String cri, int cls, int crs,
+    public QuerySerializer addMeta(String cli, String cri, int cls, int crs,
             int num, int pageIndex) {
         MetaQueryBuilder meta = new MetaQueryBuilder();
         meta.setSpanContext(cls, cli, crs, cri);
@@ -234,7 +257,8 @@
         return this;
     }
 
-    public QuerySerializer setDeprCollection(CollectionQueryBuilder collections) {
+    public QuerySerializer setDeprCollection(
+            CollectionQueryBuilder collections) {
         this.collection = collections.raw();
         return this;
     }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/QueryUtils.java b/src/main/java/de/ids_mannheim/korap/query/serialize/QueryUtils.java
index 537e578..c6d1ebb 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/QueryUtils.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/QueryUtils.java
@@ -17,388 +17,252 @@
  * @date 10/12/2013
  */
 public class QueryUtils {
-	//
-	//    /**
-	//     * Returns the category (or 'label') of the root of a ParseTree (ANTLR 4).
-	//     *
-	//     * @param node
-	//     * @return
-	//     */
-	//    public static String getNodeCat(ParseTree node) {
-	//        String nodeCat = node.toStringTree(parser);
-	//        Pattern p = Pattern.compile("\\((.*?)\\s"); // from opening parenthesis to 1st whitespace
-	//        Matcher m = p.matcher(node.toStringTree(parser));
-	//        if (m.find()) {
-	//            nodeCat = m.group(1);
-	//        }
-	//        return nodeCat;
-	//    }
-	//
-	//    /**
-	//     * Returns the category (or 'label') of the root of a ParseTree (ANTLR 3).
-	//     *
-	//     * @param node
-	//     * @return
-	//     */
-	//    public static String getNodeCat(Tree node) {
-	//        String nodeCat = node.toStringTree();
-	//        Pattern p = Pattern.compile("\\((.*?)\\s"); // from opening parenthesis to 1st whitespace
-	//        Matcher m = p.matcher(node.toStringTree());
-	//        if (m.find()) {
-	//            nodeCat = m.group(1);
-	//        }
-	//        return nodeCat;
-	//    }
-	//
-	//
-	//    /**
-	//     * Tests whether a certain node has a child by a certain name
-	//     *
-	//     * @param node     The parent node.
-	//     * @param childCat The category of the potential child.
-	//     * @return true iff one or more children belong to the specified category
-	//     */
-	//    public static boolean hasChild(Tree node, String childCat) {
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            if (getNodeCat(node.getChild(i)).equals(childCat)) {
-	//                return true;
-	//            }
-	//        }
-	//        return false;
-	//    }
-	//    
-	//    /**
-	//     * Tests whether a certain node has a child by a certain name
-	//     *
-	//     * @param node     The parent node.
-	//     * @param childCat The category of the potential child.
-	//     * @return true iff one or more children belong to the specified category
-	//     */
-	//    public static boolean hasChild(ParseTree node, String childCat) {
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            if (getNodeCat(node.getChild(i)).equals(childCat)) {
-	//                return true;
-	//            }
-	//        }
-	//        return false;
-	//    }
-	//
-	//    public static boolean hasDescendant(ParseTree node, String childCat) {
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            ParseTree child = node.getChild(i);
-	//            if (getNodeCat(child).equals(childCat)) {
-	//                return true;
-	//            }
-	//            if (hasDescendant(child, childCat)) {
-	//                return true;
-	//            }
-	//        }
-	//        return false;
-	//    }
-	//
-	//    public static List<Tree> getChildrenWithCat(Tree node, String nodeCat) {
-	//        ArrayList<Tree> children = new ArrayList<Tree>();
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
-	//                children.add(node.getChild(i));
-	//            }
-	//        }
-	//        return children;
-	//    }
-	//
-	//    public static List<ParseTree> getChildrenWithCat(ParseTree node, String nodeCat) {
-	//        ArrayList<ParseTree> children = new ArrayList<ParseTree>();
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
-	//                children.add(node.getChild(i));
-	//            }
-	//        }
-	//        return children;
-	//    }
-	//
-	//    public static List<ParseTree> getChildren(ParseTree node) {
-	//        ArrayList<ParseTree> children = new ArrayList<ParseTree>();
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//                children.add(node.getChild(i));
-	//        }
-	//        return children;
-	//    }
-	//    
-	//    public static Tree getFirstChildWithCat(Tree node, String nodeCat) {
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
-	//                return node.getChild(i);
-	//            }
-	//        }
-	//        return null;
-	//    }
-	//
-	//    public static ParseTree getFirstChildWithCat(ParseTree node, String nodeCat) {
-	//        for (int i = 0; i < node.getChildCount(); i++) {
-	//            if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
-	//                return node.getChild(i);
-	//            }
-	//        }
-	//        return null;
-	//    }
-	//    
-	//    /**
-	//     * Checks whether a node only serves as a container for another node (e.g. in (cq_segment ( cg_seg_occ ...)), the cq_segment node does not contain
-	//     * any information and only contains the cq_seg_occ node.  
-	//     * @param node The node to check
-	//     * @return true iff the node is a container only.
-	//     */
-	//    public static boolean isContainerOnly(ParseTree node) {
-	//    	String[] validNodeNamesArray = "cq_segment sq_segment element empty_segments".split(" ");
-	//    	List<String> validNodeNames = Arrays.asList(validNodeNamesArray);
-	//    	List<ParseTree> children = getChildren(node);
-	//    	for (ParseTree child : children) {
-	//    		if (validNodeNames.contains(getNodeCat(child))) {
-	//    			return false;
-	//    		}
-	//    	}
-	//    	return true;
-	//    }
 
-	public static SimpleEntry<String, Integer> checkUnbalancedPars(String q) {
-		Map<Character, Character> brackets = new HashMap<Character, Character>();
-		brackets.put('[', ']');
-		brackets.put('{', '}');
-		brackets.put('(', ')');
-		Set<Character> allChars = new HashSet<Character>();
-		allChars.addAll(brackets.keySet());
-		allChars.addAll(brackets.values());
-		int lastOpenBracket = 0;
-		
-		final Stack<Character> stack = new Stack<Character>();
-		for (int i = 0; i < q.length(); i++) {
-			if (!allChars.contains(q.charAt(i))) continue;
-			if (brackets.containsKey(q.charAt(i))) {
-				stack.push(q.charAt(i));
-				lastOpenBracket = i;
-			} else if (stack.empty() || (q.charAt(i) != brackets.get(stack.pop()))) {
-				return new SimpleEntry<String, Integer>("Parantheses/brackets unbalanced.",i);
-			} 
-		}
-		if (!stack.empty()) return new SimpleEntry<String, Integer>("Parantheses/brackets unbalanced.", lastOpenBracket);
-		return null;
-	}
+    public static SimpleEntry<String, Integer> checkUnbalancedPars(String q) {
+        Map<Character, Character> brackets = new HashMap<Character, Character>();
+        brackets.put('[', ']');
+        brackets.put('{', '}');
+        brackets.put('(', ')');
+        Set<Character> allChars = new HashSet<Character>();
+        allChars.addAll(brackets.keySet());
+        allChars.addAll(brackets.values());
+        int lastOpenBracket = 0;
 
-	public static List<String> parseMorph(String stringTree) {
+        final Stack<Character> stack = new Stack<Character>();
+        for (int i = 0; i < q.length(); i++) {
+            if (!allChars.contains(q.charAt(i)))
+                continue;
+            if (brackets.containsKey(q.charAt(i))) {
+                stack.push(q.charAt(i));
+                lastOpenBracket = i;
+            }
+            else if (stack.empty()
+                    || (q.charAt(i) != brackets.get(stack.pop()))) {
+                return new SimpleEntry<String, Integer>(
+                        "Parantheses/brackets unbalanced.", i);
+            }
+        }
+        if (!stack.empty())
+            return new SimpleEntry<String, Integer>(
+                    "Parantheses/brackets unbalanced.", lastOpenBracket);
+        return null;
+    }
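A small usage sketch (illustrative only): an unclosed opening bracket is reported at the position of the last opening bracket, while a balanced query yields null:

    import java.util.AbstractMap.SimpleEntry;
    import de.ids_mannheim.korap.query.serialize.QueryUtils;

    public class BracketCheckExample {
        public static void main(String[] args) {
            // unclosed '[' -> error reported at position 0
            SimpleEntry<String, Integer> err =
                    QueryUtils.checkUnbalancedPars("[base=foo");
            System.out.println(err.getKey() + " at " + err.getValue());
            // balanced query -> null
            System.out.println(QueryUtils.checkUnbalancedPars("[base=foo]"));
        }
    }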
 
-		ArrayList<String> morph = new ArrayList<String>();
-		return morph;
-	}
+    public static List<String> parseMorph(String stringTree) {
 
+        ArrayList<String> morph = new ArrayList<String>();
+        return morph;
+    }
 
-	public static String buildCypherQuery(String cypher, String ctypel, String ctyper,
-			int cl, int cr, int page, int limit) {
-		//todo: implies that there is only one type allowed!
-		String sctypel = "", sctyper = "";
-		switch (ctypel) {
-		case "C":
-			sctypel = "chars";
-			break;
-		case "T":
-			sctypel = "tokens";
-			break;
-		}
-		switch (ctyper) {
-		case "C":
-			sctyper = "chars";
-			break;
-		case "T":
-			sctyper = "tokens";
-			break;
-		}
+    public static String buildCypherQuery(String cypher, String ctypel,
+            String ctyper, int cl, int cr, int page, int limit) {
+        // todo: implies that there is only one type allowed!
+        String sctypel = "", sctyper = "";
+        switch (ctypel) {
+            case "C":
+                sctypel = "chars";
+                break;
+            case "T":
+                sctypel = "tokens";
+                break;
+        }
+        switch (ctyper) {
+            case "C":
+                sctyper = "chars";
+                break;
+            case "T":
+                sctyper = "tokens";
+                break;
+        }
 
-		StringBuffer buffer = new StringBuffer();
-		buffer.append("<query><cypher><![CDATA[");
-		buffer.append(cypher);
-		buffer.append("]]></cypher>");
-		buffer.append("<wordAliasPrefix>wtok_</wordAliasPrefix>");
-		buffer.append("<contextColumn>sent</contextColumn>");
-		buffer.append("<contextIdColumn>sid</contextIdColumn>");
-		buffer.append("<textColumn>txt</textColumn>");
-		buffer.append("<startIndex>");
-		buffer.append(page);
-		buffer.append("</startIndex>");
-		buffer.append("<itemsPerPage>");
-		buffer.append(limit);
-		buffer.append("</itemsPerPage>");
-		buffer.append("<context>");
-		buffer.append("<left>");
-		buffer.append("<" + sctypel + ">");
-		buffer.append(cl);
-		buffer.append("</" + sctypel + ">");
-		buffer.append("</left>");
-		buffer.append("<right>");
-		buffer.append("<" + sctyper + ">");
-		buffer.append(cr);
-		buffer.append("</" + sctyper + ">");
-		buffer.append("</right>");
-		buffer.append("</context>");
-		buffer.append("</query>");
-		return buffer.toString();
-	}
+        StringBuffer buffer = new StringBuffer();
+        buffer.append("<query><cypher><![CDATA[");
+        buffer.append(cypher);
+        buffer.append("]]></cypher>");
+        buffer.append("<wordAliasPrefix>wtok_</wordAliasPrefix>");
+        buffer.append("<contextColumn>sent</contextColumn>");
+        buffer.append("<contextIdColumn>sid</contextIdColumn>");
+        buffer.append("<textColumn>txt</textColumn>");
+        buffer.append("<startIndex>");
+        buffer.append(page);
+        buffer.append("</startIndex>");
+        buffer.append("<itemsPerPage>");
+        buffer.append(limit);
+        buffer.append("</itemsPerPage>");
+        buffer.append("<context>");
+        buffer.append("<left>");
+        buffer.append("<" + sctypel + ">");
+        buffer.append(cl);
+        buffer.append("</" + sctypel + ">");
+        buffer.append("</left>");
+        buffer.append("<right>");
+        buffer.append("<" + sctyper + ">");
+        buffer.append(cr);
+        buffer.append("</" + sctyper + ">");
+        buffer.append("</right>");
+        buffer.append("</context>");
+        buffer.append("</query>");
+        return buffer.toString();
+    }
 
-	public static String buildDotQuery(long sid, String graphdb_id) {
-		StringBuffer b = new StringBuffer();
-		b.append("<query>");
-		b.append("<sentenceId>");
-		b.append(sid);
-		b.append("</sentenceId>");
-		b.append("<gdbId>");
-		b.append(graphdb_id);
-		b.append("</gdbId>");
-		b.append("<hls>");
-		b.append("<hl>");
-		b.append(40857);
-		b.append("</hl>");
-		b.append("<hl>");
-		b.append(40856);
-		b.append("</hl>");
-		b.append("</hls>");
-		b.append("</query>");
+    public static String buildDotQuery(long sid, String graphdb_id) {
+        StringBuffer b = new StringBuffer();
+        b.append("<query>");
+        b.append("<sentenceId>");
+        b.append(sid);
+        b.append("</sentenceId>");
+        b.append("<gdbId>");
+        b.append(graphdb_id);
+        b.append("</gdbId>");
+        b.append("<hls>");
+        b.append("<hl>");
+        b.append(40857);
+        b.append("</hl>");
+        b.append("<hl>");
+        b.append(40856);
+        b.append("</hl>");
+        b.append("</hls>");
+        b.append("</query>");
 
-		return b.toString();
-	}
+        return b.toString();
+    }
 
-	public String buildaggreQuery(String query) {
-		StringBuffer b = new StringBuffer();
-		b.append("<query><cypher><![CDATA[");
-		b.append(query);
-		b.append("]]></cypher>");
-		b.append("<columns>");
-		b.append("<column agg='true' sum='false'>");
-		b.append("<cypherAlias>");
-		b.append("aggBy");
-		b.append("</cypherAlias>");
-		b.append("<displayName>");
-		b.append("Aggregate");
-		b.append("</displayName>");
-		b.append("</column>");
+    public String buildaggreQuery(String query) {
+        StringBuffer b = new StringBuffer();
+        b.append("<query><cypher><![CDATA[");
+        b.append(query);
+        b.append("]]></cypher>");
+        b.append("<columns>");
+        b.append("<column agg='true' sum='false'>");
+        b.append("<cypherAlias>");
+        b.append("aggBy");
+        b.append("</cypherAlias>");
+        b.append("<displayName>");
+        b.append("Aggregate");
+        b.append("</displayName>");
+        b.append("</column>");
 
-		b.append("<column agg='fals' sum='true'>");
-		b.append("<cypherAlias>");
-		b.append("cnt");
-		b.append("</cypherAlias>");
-		b.append("<displayName>");
-		b.append("Count");
-		b.append("</displayName>");
-		b.append("</column>");
-		b.append("</columns>");
+        b.append("<column agg='fals' sum='true'>");
+        b.append("<cypherAlias>");
+        b.append("cnt");
+        b.append("</cypherAlias>");
+        b.append("<displayName>");
+        b.append("Count");
+        b.append("</displayName>");
+        b.append("</column>");
+        b.append("</columns>");
 
-		b.append("</query>");
-		return b.toString();
-	}
+        b.append("</query>");
+        return b.toString();
+    }
 
-	@Deprecated
-	public static Map addParameters(Map request, int page, int num, String cli, String cri,
-			int cls, int crs, boolean cutoff) {
-		Map ctx = new LinkedHashMap();
-		List left = new ArrayList();
-		left.add(cli);
-		left.add(cls);
-		List right = new ArrayList();
-		right.add(cri);
-		right.add(crs);
-		ctx.put("left", left);
-		ctx.put("right", right);
+    @Deprecated
+    public static Map addParameters(Map request, int page, int num, String cli,
+            String cri, int cls, int crs, boolean cutoff) {
+        Map ctx = new LinkedHashMap();
+        List left = new ArrayList();
+        left.add(cli);
+        left.add(cls);
+        List right = new ArrayList();
+        right.add(cri);
+        right.add(crs);
+        ctx.put("left", left);
+        ctx.put("right", right);
 
-		request.put("startPage", page);
-		request.put("count", num);
-		request.put("context", ctx);
-		request.put("cutOff", cutoff);
+        request.put("startPage", page);
+        request.put("count", num);
+        request.put("context", ctx);
+        request.put("cutOff", cutoff);
 
-		return request;
-	}
+        return request;
+    }
 
-	public static void prepareContext(LinkedHashMap<String, Object> requestMap) {
-		LinkedHashMap<String, Object> context = new LinkedHashMap<String, Object>();
+    public static void prepareContext(LinkedHashMap<String, Object> requestMap) {
+        LinkedHashMap<String, Object> context = new LinkedHashMap<String, Object>();
 
-		LinkedHashMap<String, Object> classMap = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> operands = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> operation = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> frame = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> classRef = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> spanRef = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> classRefOp = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> min = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> max = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> exclude = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> distances = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> inOrder = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> classMap = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> operands = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> operation = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> frame = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> classRef = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> spanRef = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> classRefOp = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> min = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> max = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> exclude = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> distances = new LinkedHashMap<String, Object>();
+        LinkedHashMap<String, Object> inOrder = new LinkedHashMap<String, Object>();
 
-		operation.put("@id", "group:operation/");
-		operation.put("@type", "@id");
+        operation.put("@id", "group:operation/");
+        operation.put("@type", "@id");
 
-		classMap.put("@id", "group:class");
-		classMap.put("@type", "xsd:integer");
+        classMap.put("@id", "group:class");
+        classMap.put("@type", "xsd:integer");
 
-		operands.put("@id", "group:operands");
-		operands.put("@container", "@list");
+        operands.put("@id", "group:operands");
+        operands.put("@container", "@list");
 
-		frame.put("@id", "group:frame/");
-		frame.put("@type", "@id");
+        frame.put("@id", "group:frame/");
+        frame.put("@type", "@id");
 
-		classRef.put("@id", "group:classRef");
-		classRef.put("@type", "xsd:integer");
+        classRef.put("@id", "group:classRef");
+        classRef.put("@type", "xsd:integer");
 
-		spanRef.put("@id", "group:spanRef");
-		spanRef.put("@type", "xsd:integer");
+        spanRef.put("@id", "group:spanRef");
+        spanRef.put("@type", "xsd:integer");
 
-		classRefOp.put("@id", "group:classRefOp");
-		classRefOp.put("@type", "@id");
+        classRefOp.put("@id", "group:classRefOp");
+        classRefOp.put("@type", "@id");
 
-		min.put("@id", "boundary:min");
-		min.put("@type", "xsd:integer");
+        min.put("@id", "boundary:min");
+        min.put("@type", "xsd:integer");
 
-		max.put("@id", "boundary:max");
-		max.put("@type", "xsd:integer");
+        max.put("@id", "boundary:max");
+        max.put("@type", "xsd:integer");
 
-		exclude.put("@id", "group:exclude");
-		exclude.put("@type", "xsd:boolean");
+        exclude.put("@id", "group:exclude");
+        exclude.put("@type", "xsd:boolean");
 
-		distances.put("@id", "group:distances");
-		distances.put("@container", "@list");
+        distances.put("@id", "group:distances");
+        distances.put("@container", "@list");
 
-		inOrder.put("@id", "group:inOrder");
-		inOrder.put("@type", "xsd:boolean");
+        inOrder.put("@id", "group:inOrder");
+        inOrder.put("@type", "xsd:boolean");
 
-		context.put("korap", "http://korap.ids-mannheim.de/ns/KorAP/json-ld/v0.1/");
-		context.put("boundary", "korap:boundary/");
-		context.put("group", "korap:group/");
-		context.put("operation", operation);
-		context.put("class", classMap);
-		context.put("operands", operands);
-		context.put("frame", frame);
-		context.put("classRef", classRef);
-		context.put("spanRef", spanRef);
-		context.put("classRefOp", classRefOp);
-		context.put("min", min);
-		context.put("max", max);
-		context.put("exclude", exclude);
-		context.put("distances", distances);
-		context.put("inOrder", inOrder);
+        context.put("korap",
+                "http://korap.ids-mannheim.de/ns/KorAP/json-ld/v0.1/");
+        context.put("boundary", "korap:boundary/");
+        context.put("group", "korap:group/");
+        context.put("operation", operation);
+        context.put("class", classMap);
+        context.put("operands", operands);
+        context.put("frame", frame);
+        context.put("classRef", classRef);
+        context.put("spanRef", spanRef);
+        context.put("classRefOp", classRefOp);
+        context.put("min", min);
+        context.put("max", max);
+        context.put("exclude", exclude);
+        context.put("distances", distances);
+        context.put("inOrder", inOrder);
 
-		requestMap.put("@context", context);
-	}
+        requestMap.put("@context", context);
+    }
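For orientation, the prepared map can be serialized to inspect the resulting JSON-LD @context block; the sketch below assumes a Jackson 2 ObjectMapper is available on the classpath (an assumption, since the project's actual JSON setup is not shown here):

    import java.util.LinkedHashMap;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import de.ids_mannheim.korap.query.serialize.QueryUtils;

    public class ContextExample {
        public static void main(String[] args) throws Exception {
            LinkedHashMap<String, Object> request =
                    new LinkedHashMap<String, Object>();
            QueryUtils.prepareContext(request);
            // prints the JSON-LD "@context" block, e.g. the "korap" namespace
            // and typed keys such as "operation" and "classRef"
            System.out.println(new ObjectMapper().writeValueAsString(request));
        }
    }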
 
-	public static String escapeRegexSpecialChars(String key) {
-		key.replace("\\", "\\\\");
-		Pattern p = Pattern.compile("\\.|\\^|\\$|\\||\\?|\\*|\\+|\\(|\\)|\\[|\\]|\\{|\\}");
-		Matcher m = p.matcher(key);
-		while (m.find()) {
-			System.out.println(m.group(0));
-			String match = m.group();
-			System.out.println(key);
-			key = m.replaceAll("\\\\"+match);
-			System.out.println(" > "+key);
-		}
-		return key;
-	}
-
+    public static String escapeRegexSpecialChars(String key) {
+        key = key.replace("\\", "\\\\");
+        Pattern p = Pattern
+                .compile("\\.|\\^|\\$|\\||\\?|\\*|\\+|\\(|\\)|\\[|\\]|\\{|\\}");
+        Matcher m = p.matcher(key);
+        while (m.find()) {
+            System.out.println(m.group(0));
+            String match = m.group();
+            System.out.println(key);
+            key = m.replaceAll("\\\\" + match);
+            System.out.println(" > " + key);
+        }
+        return key;
+    }
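Because the loop above calls replaceAll() once per match with a replacement built from that single match, escaping can go wrong when several different metacharacters occur in one key. A single-pass alternative (a sketch only, not what this patch implements) could look like this:

    import java.util.regex.Pattern;

    public class RegexEscapeSketch {
        // escape backslashes first, then every other metacharacter in one
        // pass via a group backreference
        public static String escapeRegexSpecialChars(String key) {
            key = key.replace("\\", "\\\\");
            return key.replaceAll("([.^$|?*+()\\[\\]{}])", "\\\\$1");
        }

        public static void main(String[] args) {
            System.out.println(escapeRegexSpecialChars("a.b*c")); // a\.b\*c
            System.out.println(Pattern.matches(
                    escapeRegexSpecialChars("a.b*c"), "a.b*c")); // true
        }
    }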
 
 
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr3DescriptiveErrorListener.java b/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr3DescriptiveErrorListener.java
index 7a2d3bb..cf81273 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr3DescriptiveErrorListener.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr3DescriptiveErrorListener.java
@@ -9,81 +9,86 @@
 import de.ids_mannheim.korap.query.serialize.QueryUtils;
 
 /**
- * Custom descriptive error listener for Antlr3 grammars. Requires interface IErrorReporter to be present in 
- * grammar destination (generated source directory).
+ * Custom descriptive error listener for Antlr3 grammars. Requires
+ * interface IErrorReporter to be present in grammar destination
+ * (generated source directory).
+ * 
  * @author Joachim Bingel (bingel@ids-mannheim.de)
  *
  */
 public class Antlr3DescriptiveErrorListener implements IErrorReporter {
 
-	private String query;
-	private String offendingSymbol;
-	private String expected;
-	private int charPosition;
+    private String query;
+    private String offendingSymbol;
+    private String expected;
+    private int charPosition;
 
-	public Antlr3DescriptiveErrorListener(String query) {
-		this.query = query;
-	};
+    public Antlr3DescriptiveErrorListener (String query) {
+        this.query = query;
+    };
 
-	@Override
-	public void reportError(String error) {
-		String charPositionStr = null;
-		String offendingSymbol = null;
-		String expected = null;
-		Pattern p = Pattern.compile("line \\d+:(\\d+).* '(.+?)' expecting (.+)");
-		Matcher m = p.matcher(error);
-		if (m.find()) {
-			charPositionStr = m.group(1);
-			offendingSymbol = m.group(2);
-			expected = m.group(3);
-		}
-		if (charPositionStr != null)
-			this.charPosition = Integer.parseInt(charPositionStr);
-		if (offendingSymbol != null)
-			this.offendingSymbol = offendingSymbol;
-		if (expected != null)
-			this.expected = expected;
-	}
+    @Override
+    public void reportError(String error) {
+        String charPositionStr = null;
+        String offendingSymbol = null;
+        String expected = null;
+        Pattern p = Pattern
+                .compile("line \\d+:(\\d+).* '(.+?)' expecting (.+)");
+        Matcher m = p.matcher(error);
+        if (m.find()) {
+            charPositionStr = m.group(1);
+            offendingSymbol = m.group(2);
+            expected = m.group(3);
+        }
+        if (charPositionStr != null)
+            this.charPosition = Integer.parseInt(charPositionStr);
+        if (offendingSymbol != null)
+            this.offendingSymbol = offendingSymbol;
+        if (expected != null)
+            this.expected = expected;
+    }
 
-	public ArrayList<Object> generateFullErrorMsg() {
-		ArrayList<Object> errorSpecs = new ArrayList<Object>();
-		String msg = getDetailedErrorMessage(); 
-		errorSpecs.add(StatusCodes.MALFORMED_QUERY);
-		errorSpecs.add(msg);
-		errorSpecs.add(getCharPosition());
-		return errorSpecs;
-	}
+    public ArrayList<Object> generateFullErrorMsg() {
+        ArrayList<Object> errorSpecs = new ArrayList<Object>();
+        String msg = getDetailedErrorMessage();
+        errorSpecs.add(StatusCodes.MALFORMED_QUERY);
+        errorSpecs.add(msg);
+        errorSpecs.add(getCharPosition());
+        return errorSpecs;
+    }
 
-	private String getDetailedErrorMessage() {
-		// default message, in case no detailed info is available;
-		String msg = "Malformed query. Could not parse."; 
-		char offendingSymbol = query.charAt(0);
-		if (query.length() > charPosition) offendingSymbol = query.charAt(charPosition);
-		msg = "Failing to parse at symbol: '"+offendingSymbol+"'";
-		if (expected != null) {
-			if (expected.equals("EOF") || expected.equals("<EOF>")) {
-				msg += " Expected end of query.";
-			} else {
-				msg += " Expected '"+expected+"'";
-			}
-		}
-		// check for unbalanced parantheses
-		SimpleEntry<String, Integer> unbalanced = QueryUtils.checkUnbalancedPars(query); 
-		if (unbalanced != null) {
-			msg = unbalanced.getKey();
-			charPosition = unbalanced.getValue();
-		}
-		// check if more more arguments expected before closing operator
-		if (String.valueOf(offendingSymbol).equals(")")) {
-			msg = "Early closing parenthesis. Possibly lacking arguments for operator.";
-		}
+    private String getDetailedErrorMessage() {
+        // default message, in case no detailed info is available;
+        String msg = "Malformed query. Could not parse.";
+        char offendingSymbol = query.charAt(0);
+        if (query.length() > charPosition)
+            offendingSymbol = query.charAt(charPosition);
+        msg = "Failing to parse at symbol: '" + offendingSymbol + "'";
+        if (expected != null) {
+            if (expected.equals("EOF") || expected.equals("<EOF>")) {
+                msg += " Expected end of query.";
+            }
+            else {
+                msg += " Expected '" + expected + "'";
+            }
+        }
+        // check for unbalanced parentheses
+        SimpleEntry<String, Integer> unbalanced = QueryUtils
+                .checkUnbalancedPars(query);
+        if (unbalanced != null) {
+            msg = unbalanced.getKey();
+            charPosition = unbalanced.getValue();
+        }
+        // check whether more arguments were expected before the
+        // closing operator
+        if (String.valueOf(offendingSymbol).equals(")")) {
+            msg = "Early closing parenthesis. Possibly lacking arguments for operator.";
+        }
+        return msg;
+    }
 
-		return msg;
-	}
-
-	public int getCharPosition() {
-		return charPosition;
-	}
-
+    public int getCharPosition() {
+        return charPosition;
+    }
 
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr4DescriptiveErrorListener.java b/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr4DescriptiveErrorListener.java
index fd38556..7fa82aa 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr4DescriptiveErrorListener.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/util/Antlr4DescriptiveErrorListener.java
@@ -10,66 +10,65 @@
 import de.ids_mannheim.korap.query.serialize.QueryUtils;
 
 public class Antlr4DescriptiveErrorListener extends BaseErrorListener {
-    
+
     String query;
     String message;
-	int line;
+    int line;
     int charPosition;
-        
-    public Antlr4DescriptiveErrorListener(String query) {
-    	this.query = query;
+
+    public Antlr4DescriptiveErrorListener (String query) {
+        this.query = query;
     };
 
     @Override
-    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
-                            int line, int charPositionInLine,
-                            String msg, RecognitionException e)
-    {
-    	this.message = msg;
-    	this.line = line;
-    	this.charPosition = charPositionInLine;
+    public void syntaxError(Recognizer<?, ?> recognizer,
+            Object offendingSymbol, int line, int charPositionInLine,
+            String msg, RecognitionException e) {
+        this.message = msg;
+        this.line = line;
+        this.charPosition = charPositionInLine;
     }
-    
+
     public String getMessage() {
-		return message;
-	}
+        return message;
+    }
 
-	public int getLine() {
-		return line;
-	}
+    public int getLine() {
+        return line;
+    }
 
-	public int getCharPosition() {
-		return charPosition;
-	}
-	
-	public ArrayList<Object> generateFullErrorMsg() {
-		ArrayList<Object> errorSpecs = new ArrayList<Object>();
-		String msg = getDetailedErrorMessage(); 
-		errorSpecs.add(StatusCodes.MALFORMED_QUERY);
-		errorSpecs.add(msg);
-		errorSpecs.add(getCharPosition());
-		return errorSpecs;
-	}
+    public int getCharPosition() {
+        return charPosition;
+    }
 
-	private String getDetailedErrorMessage() {
-		// default message, in case no detailed info is available;
-		String msg = "Malformed query. Could not parse."; 
-		char offendingSymbol = query.charAt(0);
-		if (query.length() > charPosition) offendingSymbol = query.charAt(charPosition);
-		msg = "Failing to parse at symbol: '"+offendingSymbol+"'";
-		// check for unbalanced parantheses
-		SimpleEntry<String, Integer> unbalanced = QueryUtils.checkUnbalancedPars(query); 
-		if (unbalanced != null) {
-			msg = unbalanced.getKey();
-			charPosition = unbalanced.getValue();
-		}
-		// check if more more arguments expected before closing operator
-		if (String.valueOf(offendingSymbol).equals(")")) {
-			msg = "Early closing parenthesis. Possibly lacking arguments for operator.";
-		}
-		return msg;
-	}
-	
-  
+    public ArrayList<Object> generateFullErrorMsg() {
+        ArrayList<Object> errorSpecs = new ArrayList<Object>();
+        String msg = getDetailedErrorMessage();
+        errorSpecs.add(StatusCodes.MALFORMED_QUERY);
+        errorSpecs.add(msg);
+        errorSpecs.add(getCharPosition());
+        return errorSpecs;
+    }
+
+    private String getDetailedErrorMessage() {
+        // default message, in case no detailed info is available
+        String msg = "Malformed query. Could not parse.";
+        char offendingSymbol = query.charAt(0);
+        if (query.length() > charPosition)
+            offendingSymbol = query.charAt(charPosition);
+        msg = "Failing to parse at symbol: '" + offendingSymbol + "'";
+        // check for unbalanced parentheses
+        SimpleEntry<String, Integer> unbalanced = QueryUtils
+                .checkUnbalancedPars(query);
+        if (unbalanced != null) {
+            msg = unbalanced.getKey();
+            charPosition = unbalanced.getValue();
+        }
+        // check if more arguments are expected before the closing operator
+        if (String.valueOf(offendingSymbol).equals(")")) {
+            msg = "Early closing parenthesis. Possibly lacking arguments for operator.";
+        }
+        return msg;
+    }
 
 }
\ No newline at end of file
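
A usage sketch for the listener above (the AqlLexer/AqlParser names, the start() entry rule and the errors list are assumptions and not part of this patch; ANTLR 4 runtime imports omitted): a processor registers the listener on both lexer and parser and reads the collected specs afterwards.

    Antlr4DescriptiveErrorListener errorListener =
            new Antlr4DescriptiveErrorListener(query);
    AqlLexer lexer = new AqlLexer(new ANTLRInputStream(query));
    AqlParser parser = new AqlParser(new CommonTokenStream(lexer));
    lexer.removeErrorListeners();
    lexer.addErrorListener(errorListener);
    parser.removeErrorListeners();
    parser.addErrorListener(errorListener);
    ParseTree tree = parser.start();              // hypothetical entry rule
    if (errorListener.getMessage() != null) {
        // [StatusCodes.MALFORMED_QUERY, message, char position]
        ArrayList<Object> errorSpecs = errorListener.generateFullErrorMsg();
        errors.add(errorSpecs);                   // assumed processor error list
    }
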
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/util/CosmasCondition.java b/src/main/java/de/ids_mannheim/korap/query/serialize/util/CosmasCondition.java
index 454e9f6..4c79500 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/util/CosmasCondition.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/util/CosmasCondition.java
@@ -20,6 +20,7 @@
 		elem = nodeString.substring(0, 1);
 		nodeString = nodeString.substring(1);
 		
-		position = nodeString.equals("a") ? "frames:startswith" : "frames:endswith";
+		position = nodeString.equals("a") ? 
+		        "frames:startswith" : "frames:endswith";
 	}
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java b/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java
index abc4511..50b71a6 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java
@@ -8,265 +8,299 @@
 
 public class CqlfObjectGenerator {
 
-	protected static final Integer MAXIMUM_DISTANCE = 100; 
-	private static AbstractQueryProcessor qp;
-	
-	public static void setQueryProcessor(AbstractQueryProcessor qp) {
-		CqlfObjectGenerator.qp = qp;
-	}
-	
-	public static LinkedHashMap<String, Object> makeSpan() {
-		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
-		span.put("@type", "korap:span");
-		return span;
-	}
-	
-	public static LinkedHashMap<String, Object> makeSpan(String key) {
-		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
-		span.put("@type", "korap:span");
-		span.put("key", key);
-		return span;
-	}
-	
-	public static LinkedHashMap<String, Object> makeTerm() {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:term");
-		return term;
-	}
-	
-	public static LinkedHashMap<String, Object> makeTermGroup(String relation) {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:termGroup");
-		term.put("relation", "relation:"+relation);
-		term.put("operands", new ArrayList<Object>());
-		return term;
-	}
-	
-	public static LinkedHashMap<String, Object> makeDoc() {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:doc");
-		return term;
-	}
-	
-	public static LinkedHashMap<String, Object> makeDocGroup(String relation) {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:docGroup");
-		term.put("operation", "operation:"+relation);
-		term.put("operands", new ArrayList<Object>());
-		return term;
-	}
-	
-	public static LinkedHashMap<String, Object> makeToken() {
-		LinkedHashMap<String, Object> token = new LinkedHashMap<String, Object>();
-		token.put("@type", "korap:token");
-		return token;
-	}
-	
-	public static LinkedHashMap<String, Object> makeGroup(String operation) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:"+operation);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeRepetition(Integer min, Integer max) {
-		LinkedHashMap<String, Object> group = makeGroup("repetition");
-		group.put("boundary", makeBoundary(min, max));
-		group.put("min", min);
-		return group;
-	}
-	
-	@Deprecated
-	public static LinkedHashMap<String, Object> makePosition(String frame) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:position");
-		group.put("frame", "frame:"+frame);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makePosition(String[] allowedFrames, String[] classRefCheck) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:position");
-		group.put("frames", Arrays.asList(allowedFrames));
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeSpanClass(int classCount) {
-		return makeSpanClass(classCount, true);
-	}
-	
-	public static LinkedHashMap<String, Object> makeSpanClass(int classCount, boolean setBySystem) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:class");
-		if (setBySystem) {
-			group.put("class", 128+classCount);
-			group.put("classOut", 128+classCount);
-			qp.addMessage("A class has been introduced into the backend representation of " +
-					"your query for later reference to a part of the query. The class id is "+(128+classCount));
-		} else {
-			group.put("class", classCount);
-			group.put("classOut", classCount);
-		}
-		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. " +
-				"Classes are now defined using the 'classOut' attribute.");
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeClassRefCheck(ArrayList<String> check, Integer[] classIn, int classOut) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:class");
-		group.put("classRefCheck", check);
-		group.put("classIn", Arrays.asList(classIn));
-		group.put("classOut", classOut);
-		group.put("class", classOut);
-		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. " +
-				"Classes are now defined using the 'classOut' attribute.");
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeClassRefOp(String operation, Integer[] classIn, int classOut) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:class");
-		group.put("classRefOp", operation);
-		group.put("classIn", Arrays.asList(classIn));
-		group.put("classOut", classOut);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	@Deprecated
-	public static LinkedHashMap<String, Object> makeTreeRelation(String reltype) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:treeRelation");
-		if (reltype != null) group.put("reltype", reltype);
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeRelation() {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:relation");
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeBoundary(Integer min, Integer max) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:boundary");
-		group.put("min", min);
-		if (max != null) {
-			group.put("max", max);
-		}
-		return group;
-	}
+    protected static final Integer MAXIMUM_DISTANCE = 100;
+    private static AbstractQueryProcessor qp;
 
-	public static LinkedHashMap<String, Object> makeDistance(String key, Integer min, Integer max) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		if (key.equals("w")) {
-			group.put("@type", "korap:distance");
-		} else {
-			group.put("@type", "cosmas:distance");
-		}
-		group.put("key", key);
-		group.put("boundary", makeBoundary(min, max));
-		group.put("min", min);
-		if (max != null) {
-			group.put("max", max);
-		}
-		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-07-24: 'min' and 'max' to be supported until 3 months from deprecation date.");
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeReference(ArrayList<Integer> classRefs, String operation) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:reference");
-		group.put("operation", "operation:"+operation);
-		if (classRefs!= null && !classRefs.isEmpty()) {
-			group.put("classRef", classRefs);
-		}
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeReference(ArrayList<Integer> classRefs) {
-		return makeReference(classRefs, "focus");
-	}
-	
-	public static LinkedHashMap<String, Object> makeReference(int classRef, String operation, boolean setBySystem) {
-		ArrayList<Integer> classRefs = new ArrayList<Integer>();
-		if (setBySystem) classRef = classRef+128;
-		classRefs.add(classRef);
-		return makeReference(classRefs, operation);
-	}
-	
-	public static LinkedHashMap<String, Object> makeReference(int classRef, boolean setBySystem) {
-		ArrayList<Integer> classRefs = new ArrayList<Integer>();
-		if (setBySystem) classRef = classRef+128;
-		classRefs.add(classRef);
-		return makeReference(classRefs, "focus");
-	}
-	
-	public static LinkedHashMap<String, Object> makeReference(int classRef) {
-		return makeReference(classRef, false);
-	}
-	
-	public static LinkedHashMap<String, Object> makeResetReference() {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:reference");
-		group.put("operation", "operation:focus");
-		group.put("reset", true);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static LinkedHashMap<String, Object> makeSpanReference(Integer[] spanRef, String operation) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:reference");
-		group.put("operation", "operation:"+operation);
-		group.put("spanRef", Arrays.asList(spanRef));
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	public static void addOperandsToGroup(LinkedHashMap<String, Object> group) {
-		ArrayList<Object> operands = new ArrayList<Object>();
-		group.put("operands", operands);
-	}
-	
-	public static LinkedHashMap<String, Object> wrapInReference(LinkedHashMap<String, Object> group, Integer classId) {
-		LinkedHashMap<String, Object> refGroup = makeReference(classId);
-		ArrayList<Object> operands = new ArrayList<Object>();
-		operands.add(group);
-		refGroup.put("operands", operands);
-		return refGroup;
-	}
+    public static void setQueryProcessor(AbstractQueryProcessor qp) {
+        CqlfObjectGenerator.qp = qp;
+    }
 
-	@SuppressWarnings("unchecked")
-	public static LinkedHashMap<String, Object> wrapInClass(LinkedHashMap<String, Object> group, Integer classId) {
-		LinkedHashMap<String, Object> classGroup = makeSpanClass(classId, true);
-		((ArrayList<Object>) classGroup.get("operands")).add(group);
-		return classGroup;
-	}
-	
-	/**
-	 * Ensures that a distance or quantification value does not exceed the allowed maximum value. 
-	 * @param number
-	 * @return The input number if it is below the allowed maximum value, else the maximum value. 
-	 */
-	public static int cropToMaxValue(int number) {
-		if (number > MAXIMUM_DISTANCE) {
-			number = MAXIMUM_DISTANCE; 
-			String warning = String.format("You specified a distance between two segments that is greater than " +
-					"the allowed max value of %d. Your query will be re-interpreted using a distance of %d.", MAXIMUM_DISTANCE, MAXIMUM_DISTANCE);
-			qp.addWarning(warning);
-		}
-		return number;
-	}
+    public static LinkedHashMap<String, Object> makeSpan() {
+        LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
+        span.put("@type", "korap:span");
+        return span;
+    }
+
+    public static LinkedHashMap<String, Object> makeSpan(String key) {
+        LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
+        span.put("@type", "korap:span");
+        span.put("key", key);
+        return span;
+    }
+
+    public static LinkedHashMap<String, Object> makeTerm() {
+        LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+        term.put("@type", "korap:term");
+        return term;
+    }
+
+    public static LinkedHashMap<String, Object> makeTermGroup(String relation) {
+        LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+        term.put("@type", "korap:termGroup");
+        term.put("relation", "relation:" + relation);
+        term.put("operands", new ArrayList<Object>());
+        return term;
+    }
+
+    public static LinkedHashMap<String, Object> makeDoc() {
+        LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+        term.put("@type", "korap:doc");
+        return term;
+    }
+
+    public static LinkedHashMap<String, Object> makeDocGroup(String relation) {
+        LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+        term.put("@type", "korap:docGroup");
+        term.put("operation", "operation:" + relation);
+        term.put("operands", new ArrayList<Object>());
+        return term;
+    }
+
+    public static LinkedHashMap<String, Object> makeToken() {
+        LinkedHashMap<String, Object> token = new LinkedHashMap<String, Object>();
+        token.put("@type", "korap:token");
+        return token;
+    }
+
+    public static LinkedHashMap<String, Object> makeGroup(String operation) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:group");
+        group.put("operation", "operation:" + operation);
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeRepetition(Integer min,
+            Integer max) {
+        LinkedHashMap<String, Object> group = makeGroup("repetition");
+        group.put("boundary", makeBoundary(min, max));
+        group.put("min", min);
+        return group;
+    }
+
+    @Deprecated
+    public static LinkedHashMap<String, Object> makePosition(String frame) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:group");
+        group.put("operation", "operation:position");
+        group.put("frame", "frame:" + frame);
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makePosition(
+            String[] allowedFrames, String[] classRefCheck) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:group");
+        group.put("operation", "operation:position");
+        group.put("frames", Arrays.asList(allowedFrames));
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeSpanClass(int classCount) {
+        return makeSpanClass(classCount, true);
+    }
+
+    public static LinkedHashMap<String, Object> makeSpanClass(int classId,
+            boolean setBySystem) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:group");
+        group.put("operation", "operation:class");
+        if (setBySystem) {
+            classId += 128;
+            qp.addMessage("A class has been introduced into the backend representation of "
+                    + "your query for later reference to a part of the query. The class id is "
+                    + classId);
+        }
+        group.put("class", classId);
+        group.put("classOut", classId);
+        qp.addMessage(
+                StatusCodes.DEPRECATED_QUERY_ELEMENT,
+                "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. "
+                        + "Classes are now defined using the 'classOut' attribute.");
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeClassRefCheck(
+            ArrayList<String> check, Integer[] classIn, int classOut) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:group");
+        group.put("operation", "operation:class");
+        group.put("classRefCheck", check);
+        group.put("classIn", Arrays.asList(classIn));
+        group.put("classOut", classOut);
+        group.put("class", classOut);
+        qp.addMessage(
+                StatusCodes.DEPRECATED_QUERY_ELEMENT,
+                "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. "
+                        + "Classes are now defined using the 'classOut' attribute.");
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeClassRefOp(
+            String operation, Integer[] classIn, int classOut) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:group");
+        group.put("operation", "operation:class");
+        group.put("classRefOp", operation);
+        group.put("classIn", Arrays.asList(classIn));
+        group.put("classOut", classOut);
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    @Deprecated
+    public static LinkedHashMap<String, Object> makeTreeRelation(String reltype) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:treeRelation");
+        if (reltype != null)
+            group.put("reltype", reltype);
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeRelation() {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:relation");
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeBoundary(Integer min,
+            Integer max) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:boundary");
+        group.put("min", min);
+        if (max != null) {
+            group.put("max", max);
+        }
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeDistance(String key,
+            Integer min, Integer max) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        if (key.equals("w")) {
+            group.put("@type", "korap:distance");
+        }
+        else {
+            group.put("@type", "cosmas:distance");
+        }
+        group.put("key", key);
+        group.put("boundary", makeBoundary(min, max));
+        group.put("min", min);
+        if (max != null) {
+            group.put("max", max);
+        }
+        qp.addMessage(
+                StatusCodes.DEPRECATED_QUERY_ELEMENT,
+                "Deprecated 2014-07-24: 'min' and 'max' to be supported until 3 months from deprecation date.");
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeReference(
+            ArrayList<Integer> classRefs, String operation) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:reference");
+        group.put("operation", "operation:" + operation);
+        if (classRefs != null && !classRefs.isEmpty()) {
+            group.put("classRef", classRefs);
+        }
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeReference(
+            ArrayList<Integer> classRefs) {
+        return makeReference(classRefs, "focus");
+    }
+
+    public static LinkedHashMap<String, Object> makeReference(int classRef,
+            String operation, boolean setBySystem) {
+        ArrayList<Integer> classRefs = new ArrayList<Integer>();
+        if (setBySystem)
+            classRef = classRef + 128;
+        classRefs.add(classRef);
+        return makeReference(classRefs, operation);
+    }
+
+    public static LinkedHashMap<String, Object> makeReference(int classRef,
+            boolean setBySystem) {
+        ArrayList<Integer> classRefs = new ArrayList<Integer>();
+        if (setBySystem)
+            classRef = classRef + 128;
+        classRefs.add(classRef);
+        return makeReference(classRefs, "focus");
+    }
+
+    public static LinkedHashMap<String, Object> makeReference(int classRef) {
+        return makeReference(classRef, false);
+    }
+
+    public static LinkedHashMap<String, Object> makeResetReference() {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:reference");
+        group.put("operation", "operation:focus");
+        group.put("reset", true);
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static LinkedHashMap<String, Object> makeSpanReference(
+            Integer[] spanRef, String operation) {
+        LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+        group.put("@type", "korap:reference");
+        group.put("operation", "operation:" + operation);
+        group.put("spanRef", Arrays.asList(spanRef));
+        group.put("operands", new ArrayList<Object>());
+        return group;
+    }
+
+    public static void addOperandsToGroup(LinkedHashMap<String, Object> group) {
+        ArrayList<Object> operands = new ArrayList<Object>();
+        group.put("operands", operands);
+    }
+  
+    public static LinkedHashMap<String, Object> wrapInReference(
+            LinkedHashMap<String, Object> group, Integer classId) {
+        return wrapInReference(group, classId, false);
+    }
+    
+    public static LinkedHashMap<String, Object> wrapInReference(
+            LinkedHashMap<String, Object> group, Integer classId, 
+            boolean setBySystem) {
+        LinkedHashMap<String, Object> refGroup = makeReference(classId, setBySystem);
+        ArrayList<Object> operands = new ArrayList<Object>();
+        operands.add(group);
+        refGroup.put("operands", operands);
+        return refGroup;
+    }
+
+    @SuppressWarnings("unchecked")
+    public static LinkedHashMap<String, Object> wrapInClass(
+            LinkedHashMap<String, Object> group, Integer classId) {
+        LinkedHashMap<String, Object> classGroup = makeSpanClass(classId, true);
+        ((ArrayList<Object>) classGroup.get("operands")).add(group);
+        return classGroup;
+    }
+
+    /**
+     * Ensures that a distance or quantification value does not exceed
+     * the allowed maximum value.
+     * 
+     * @param number
+     * @return The input number if it is below the allowed maximum
+     *         value, else the maximum value.
+     */
+    public static int cropToMaxValue(int number) {
+        if (number > MAXIMUM_DISTANCE) {
+            number = MAXIMUM_DISTANCE;
+            String warning = String
+                    .format("You specified a distance between two segments that is greater than "
+                            + "the allowed max value of %d. Your query will be re-interpreted using a distance of %d.",
+                            MAXIMUM_DISTANCE, MAXIMUM_DISTANCE);
+            qp.addWarning(warning);
+        }
+        return number;
+    }
 }
\ No newline at end of file
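
A minimal sketch of how the setBySystem flag keeps the class id of a system-introduced class group and a later reference to it in sync (assumes a query processor has been registered via setQueryProcessor(), since makeSpanClass() calls qp.addMessage()):

    LinkedHashMap<String, Object> token = CqlfObjectGenerator.makeToken();
    // wrapInClass() delegates to makeSpanClass(0, true); with setBySystem
    // the stored class/classOut id becomes 128 + 0 = 128.
    LinkedHashMap<String, Object> classed =
            CqlfObjectGenerator.wrapInClass(token, 0);
    // wrapInReference(classed, 0, true) delegates to makeReference(0, true),
    // which applies the same 128 offset, so the focus points at class 128.
    LinkedHashMap<String, Object> focused =
            CqlfObjectGenerator.wrapInReference(classed, 0, true);
    // The two-argument wrapInReference(classed, 0) keeps classRef at 0.

This is the pairing the ANNIS tests filled in below check for, asserting 128 both for classRef/0 and classOut.
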
diff --git a/src/test/java/AnnisQueryProcessorTest.java b/src/test/java/AnnisQueryProcessorTest.java
index aff5770..2788dad 100644
--- a/src/test/java/AnnisQueryProcessorTest.java
+++ b/src/test/java/AnnisQueryProcessorTest.java
@@ -10,7 +10,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import de.ids_mannheim.korap.query.serialize.QuerySerializer;
-import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
 /**
  * Tests for JSON-LD serialization of ANNIS QL queries. 
@@ -27,7 +26,7 @@
 	JsonNode res;
 
 	@Test
-	public void testContext() throws QueryException, JsonProcessingException, IOException {
+	public void testContext() throws JsonProcessingException, IOException {
 		String contextUrl = "http://ids-mannheim.de/ns/KorAP/json-ld/v0.2/context.jsonld";
 		query = "foo";
 		qs.setQuery(query, "annis");
@@ -36,7 +35,7 @@
 	}
 	
 	@Test
-	public void testSingleTokens() throws QueryException, JsonProcessingException, IOException {
+	public void testSingleTokens() throws JsonProcessingException, IOException {
 		query = "\"Mann\"";
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -68,7 +67,7 @@
 	}
 	
 	@Test 
-	public void testSpans() throws QueryException, JsonProcessingException, IOException {
+	public void testSpans() throws JsonProcessingException, IOException {
 		query = "node"; // special keyword for general span
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -90,7 +89,7 @@
 	}
 	
 	@Test
-	public void testRegex() throws QueryException, JsonProcessingException, IOException {
+	public void testRegex() throws JsonProcessingException, IOException {
 		query = "/Mann/";  
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -109,7 +108,7 @@
 	}
 
 	@Test
-	public void testFoundriesLayers() throws QueryException, JsonProcessingException, IOException {
+	public void testFoundriesLayers() throws JsonProcessingException, IOException {
 		query = "c=\"np\"";  
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -136,7 +135,7 @@
 	}
 	
 	@Test
-	public void testDirectDeclarationRelations() throws QueryException, JsonProcessingException, IOException {
+	public void testDirectDeclarationRelations() throws JsonProcessingException, IOException {
 		query = "node > node";  
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -184,7 +183,7 @@
 	}
 	
 	@Test
-	public void testDefPredicationInversion() throws QueryException, JsonProcessingException, IOException {
+	public void testDefPredicationInversion() throws JsonProcessingException, IOException {
 		query = "#1 > #2 & cnx/cat=\"vp\" & cnx/cat=\"np\"";  
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -205,7 +204,7 @@
 	}
 	
 	@Test
-	public void testSimpleDominance() throws QueryException, JsonProcessingException, IOException {
+	public void testSimpleDominance() throws JsonProcessingException, IOException {
 		query = "node & node & #2 > #1";  
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -287,7 +286,7 @@
 	}
 	
 	@Test
-	public void testIndirectDominance() throws QueryException, JsonProcessingException, IOException {
+	public void testIndirectDominance() throws JsonProcessingException, IOException {
 		query = "node & node & #1 >2,4 #2";  
 		qs.setQuery(query, "annis");
 		res = mapper.readTree(qs.toJSON());
@@ -310,8 +309,26 @@
 
 		
 	@Test
-	public void testMultipleDominance() throws QueryException {
-		
+	public void testMultipleDominance() throws JsonProcessingException, IOException {
+		query = "cat=\"CP\" & cat=\"VP\" & cat=\"NP\" & #1 > #2 > #3";  
+		qs.setQuery(query, "annis");
+		res = mapper.readTree(qs.toJSON());
+		assertEquals("korap:group",			res.at("/query/@type").asText());
+		assertEquals("operation:relation",	res.at("/query/operation").asText());
+		assertEquals("korap:reference",		res.at("/query/operands/0/@type").asText());
+		assertEquals("operation:focus",		res.at("/query/operands/0/operation").asText());
+		assertEquals(128,					res.at("/query/operands/0/classRef/0").asInt());
+		assertEquals("korap:group",			res.at("/query/operands/0/operands/0/@type").asText());
+		assertEquals("operation:relation",	res.at("/query/operands/0/operands/0/operation").asText());
+		assertEquals("korap:relation",		res.at("/query/operands/0/operands/0/relation/@type").asText());
+		assertEquals("c",					res.at("/query/operands/0/operands/0/relation/wrap/layer").asText());
+		assertEquals("korap:span",			res.at("/query/operands/0/operands/0/operands/0/@type").asText());
+		assertEquals("c",					res.at("/query/operands/0/operands/0/operands/0/layer").asText());
+		assertEquals("CP",					res.at("/query/operands/0/operands/0/operands/0/key").asText());
+		assertEquals("korap:group",			res.at("/query/operands/0/operands/0/operands/1/@type").asText());
+		assertEquals("operation:class",		res.at("/query/operands/0/operands/0/operands/1/operation").asText());
+		assertEquals(128,					res.at("/query/operands/0/operands/0/operands/1/classOut").asInt());
+		assertEquals("VP",					res.at("/query/operands/0/operands/0/operands/1/operands/0/key").asText());
 	}
 //		query = "cat=\"CP\" & cat=\"VP\" & cat=\"NP\" & #1 > #2 > #3";
 //		String dom1 = 
@@ -358,7 +375,7 @@
 //	}
 //	
 //	@Test
-//	public void testPointingRelations() throws QueryException {
+//	public void testPointingRelations() throws Exception {
 //		query = "node & node & #2 ->coref[val=\"true\"] #1";
 //		String dom1 = 
 //				"{@type=korap:group, operation=operation:relation, operands=[" +
@@ -383,7 +400,7 @@
 //	}
 //	
 //	@Test
-//	public void testSequence() throws QueryException {
+//	public void testSequence() throws Exception {
 //		query = "node & node & #1 . #2";
 //		String seq1 = 
 //				"{@type=korap:group, operation=operation:sequence, " +
@@ -424,30 +441,38 @@
 //		
 //	}
 //	
-//	@Test
-//	public void testMultipleSequence() throws QueryException {
-//		query = "tok=\"Sonne\" & tok=\"Mond\" & tok=\"Sterne\" & #1 .0,2 #2 .0,4 #3";
-//		String seq4 = 
-//				"{@type=korap:group, operation=operation:sequence," +
-//					"operands=[" +
-//						"{@type=korap:reference, operation=operation:focus, classRef=[0], operands=[" +
-//							"{@type=korap:group, operation=operation:sequence, operands=[" +
-//								"{@type=korap:token, wrap={@type=korap:term, layer=orth, key=Sonne, match=match:eq}}," +
-//								"{@type=korap:group, operation=operation:class, class=128, classOut=128, operands=[" +
-//									"{@type=korap:token, wrap={@type=korap:term, layer=orth, key=Mond, match=match:eq}}" +
-//								"]}" +
-//							"], distances=[" +
-//								"{@type=korap:distance, key=w, boundary={@type=korap:boundary, min=0, max=2}, min=0, max=2}" +
-//							"], inOrder=true}" +
-//						"]}," +	
-//						"{@type=korap:token, wrap={@type=korap:term, layer=orth, key=Sterne, match=match:eq}}" +
-//					"],distances=[" +
-//						"{@type=korap:distance, key=w, boundary={@type=korap:boundary, min=0, max=4}, min=0, max=4}" +
-//					"], inOrder=true" +
-//				"}";
-//		aqlt = new AqlTree(query);
-//		map = aqlt.getRequestMap().get("query").toString();
-//		assertEquals(seq4.replaceAll(" ", ""), map.replaceAll(" ", ""));
+	@Test
+	public void testMultipleSequence() throws Exception {
+		query = "tok=\"a\" & tok=\"b\" & tok=\"c\" & #1 . #2 & #2 . #3";
+		String seq4 = 
+				"{@type=korap:group, operation=operation:sequence," +
+					"operands=[" +
+						"{@type=korap:reference, operation=operation:focus, classRef=[0], operands=[" +
+							"{@type=korap:group, operation=operation:sequence, operands=[" +
+								"{@type=korap:token, wrap={@type=korap:term, layer=orth, key=Sonne, match=match:eq}}," +
+								"{@type=korap:group, operation=operation:class, class=128, classOut=128, operands=[" +
+									"{@type=korap:token, wrap={@type=korap:term, layer=orth, key=Mond, match=match:eq}}" +
+								"]}" +
+							"], distances=[" +
+								"{@type=korap:distance, key=w, boundary={@type=korap:boundary, min=0, max=2}, min=0, max=2}" +
+							"], inOrder=true}" +
+						"]}," +	
+						"{@type=korap:token, wrap={@type=korap:term, layer=orth, key=Sterne, match=match:eq}}" +
+					"],distances=[" +
+						"{@type=korap:distance, key=w, boundary={@type=korap:boundary, min=0, max=4}, min=0, max=4}" +
+					"], inOrder=true" +
+				"}";
+        qs.setQuery(query, "annis");
+        res = mapper.readTree(qs.toJSON());
+        assertEquals("korap:group",         res.at("/query/@type").asText());
+        assertEquals("operation:sequence",  res.at("/query/operation").asText());
+        assertEquals("korap:reference",     res.at("/query/operands/0/@type").asText());
+        assertEquals(128,                   res.at("/query/operands/0/classRef/0").asInt());
+        assertEquals(res.at("/query/operands/0/classRef/0").asInt(), 
+                     res.at("/query/operands/0/operands/0/operands/1/classOut").asInt());
+	}
+
+        
 //		
 //		query = "node & node & node & #1 . #2 .1,3 #3";
 //		String seq5 = 
@@ -499,7 +524,7 @@
 //	}
 //	
 //	@Test
-//	public void testMultipleMixedOperators() throws QueryException {
+//	public void testMultipleMixedOperators() throws Exception {
 //		query = "tok=\"Sonne\" & tok=\"Mond\" & tok=\"Sterne\" & #1 > #2 .0,4 #3";
 //		String seq4 = 
 //					"{@type=korap:group, operation=operation:sequence, operands=[" +
@@ -568,7 +593,7 @@
 //	}
 //	/*
 //	@Test
-//	public void testMultipleOperatorsWithSameOperands() throws QueryException {
+//	public void testMultipleOperatorsWithSameOperands() throws Exception {
 //		
 //		query = "cat=\"NP\" > cat=\"VP\" & #1 _l_ #2";
 //		String eq2 =
@@ -590,7 +615,7 @@
 //	}
 //	*/
 //	@Test
-//	public void testPositions() throws QueryException {
+//	public void testPositions() throws Exception {
 //		query = "node & node & #2 _=_ #1";
 //		String pos1 = 
 //				"{@type=korap:group, operation=operation:position, frames=[frames:matches], operands=[" +
@@ -647,7 +672,7 @@
 //	}
 //	
 //	@Test
-//	public void testMultiplePredications() throws QueryException {
+//	public void testMultiplePredications() throws Exception {
 //		// a noun before a verb before a preposition
 //		query = "pos=\"N\" & pos=\"V\" & pos=\"P\" & #1 . #2 & #2 . #3"; 
 //		String mult1 = 
@@ -733,7 +758,7 @@
 //	}	
 //	
 //	@Test
-//	public void testUnaryRelations() throws QueryException {
+//	public void testUnaryRelations() throws Exception {
 //		query = "node & #1:tokenarity=2";
 //		String unary1 = 
 //				"{@type=korap:span, attr={@type=korap:term, tokenarity={@type=korap:boundary,min=2,max=2}}}";
@@ -768,7 +793,7 @@
 //	}	
 //	
 //	@Test
-//	public void testCommonParent() throws QueryException {
+//	public void testCommonParent() throws Exception {
 //		query = "cat=\"NP\" & cat=\"VP\" & #1 $ #2";
 //		String cp1 =
 //				"{@type=korap:group, operation=operation:relation, operands=[" +
@@ -861,7 +886,7 @@
 	
 	/*		
 	@Test
-	public void testEqualNotequalValue() throws QueryException {
+	public void testEqualNotequalValue() throws Exception {
 		query = "cat=\"NP\" & cat=\"VP\" & #1 == #2";
 		String eq1 =
 				"{}"; // ???
diff --git a/src/test/java/CollectionQueryProcessorTest.java b/src/test/java/CollectionQueryProcessorTest.java
index 1d3046d..ebd5555 100644
--- a/src/test/java/CollectionQueryProcessorTest.java
+++ b/src/test/java/CollectionQueryProcessorTest.java
@@ -4,7 +4,6 @@
 import java.util.ArrayList;
 
 import de.ids_mannheim.korap.query.serialize.QuerySerializer;
-import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
 import org.junit.Test;
 
@@ -24,7 +23,7 @@
 	JsonNode res;
 	
 	@Test
-	public void testContext() throws QueryException, JsonProcessingException, IOException {
+	public void testContext() throws JsonProcessingException, IOException {
 		collection = "textClass=politik";
 		String contextString = "http://ids-mannheim.de/ns/KorAP/json-ld/v0.2/context.jsonld";
 		qs.setQuery(query,ql);
@@ -34,7 +33,7 @@
 	}
 	
 	@Test
-	public void testSimple() throws QueryException, JsonProcessingException, IOException {
+	public void testSimple() throws JsonProcessingException, IOException {
 		collection = "textClass=politik";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -55,7 +54,7 @@
 	}
 	
 	@Test
-	public void testContains() throws QueryException, JsonProcessingException, IOException {
+	public void testContains() throws JsonProcessingException, IOException {
 		collection = "title~Mannheim";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -76,7 +75,7 @@
 	}
 	
 	@Test
-	public void testTwoConjuncts() throws QueryException, JsonProcessingException, IOException {
+	public void testTwoConjuncts() throws JsonProcessingException, IOException {
 		collection = "textClass=Sport & pubDate in 2014";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -112,7 +111,7 @@
 	}
 
 	@Test
-	public void testThreeConjuncts() throws QueryException, JsonProcessingException, IOException {
+	public void testThreeConjuncts() throws JsonProcessingException, IOException {
 		collection = "textClass=Sport & pubDate in 2014 & corpusId=WPD";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -136,7 +135,7 @@
 		assertEquals("match:eq",		res.at("/collection/operands/1/operands/1/match").asText());
 	}
 	@Test
-	public void testTwoDisjuncts() throws QueryException, JsonProcessingException, IOException {
+	public void testTwoDisjuncts() throws JsonProcessingException, IOException {
 		collection = "textClass=Sport | pubDate in 2014";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -155,7 +154,7 @@
 	}
 	
 	@Test
-	public void testThreeDisjuncts() throws QueryException, JsonProcessingException, IOException {
+	public void testThreeDisjuncts() throws JsonProcessingException, IOException {
 		collection = "textClass=Sport | pubDate in 2014 | corpusId=WPD";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -180,7 +179,7 @@
 	}
 	
 	@Test
-	public void testMixed() throws QueryException, JsonProcessingException, IOException {
+	public void testMixed() throws JsonProcessingException, IOException {
 		collection = "textClass=Sport | (pubDate in 2014 & corpusId=WPD)";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -291,7 +290,7 @@
 	}
 
 	@Test
-	public void testDateYear() throws QueryException, JsonProcessingException, IOException {
+	public void testDateYear() throws JsonProcessingException, IOException {
 		collection = "pubDate in 2000";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
@@ -334,7 +333,7 @@
 	}
 	
 	@Test
-	public void testDateMonthDay() throws QueryException, JsonProcessingException, IOException {
+	public void testDateMonthDay() throws JsonProcessingException, IOException {
 		collection = "pubDate in 2000-02";
 		qs.setQuery(query,ql);
 		qs.setCollection(collection);
diff --git a/src/test/java/Cosmas2QueryProcessorTest.java b/src/test/java/Cosmas2QueryProcessorTest.java
index 2a545cf..6d5194f 100644
--- a/src/test/java/Cosmas2QueryProcessorTest.java
+++ b/src/test/java/Cosmas2QueryProcessorTest.java
@@ -6,7 +6,6 @@
 import org.junit.Test;
 
 import de.ids_mannheim.korap.query.serialize.QuerySerializer;
-import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -28,7 +27,7 @@
 	JsonNode res;
 
 	@Test
-	public void testContext() throws QueryException, JsonProcessingException, IOException {
+	public void testContext() throws JsonProcessingException, IOException {
 		String contextString = "http://ids-mannheim.de/ns/KorAP/json-ld/v0.2/context.jsonld";
 		query = "foo";
 		qs.setQuery(query, "cosmas2");
@@ -38,7 +37,7 @@
 
 
 	@Test
-	public void testSingleToken() throws QueryException, JsonProcessingException, IOException {
+	public void testSingleToken() throws JsonProcessingException, IOException {
 		query = "der";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -61,7 +60,7 @@
 
 
 	@Test
-	public void testWildcardToken() throws QueryException, JsonProcessingException, IOException {
+	public void testWildcardToken() throws JsonProcessingException, IOException {
 		query = "*der";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -78,7 +77,7 @@
 	}
 	//	
 	@Test
-	public void testCaseSensitivityFlag() throws QueryException, JsonProcessingException, IOException {
+	public void testCaseSensitivityFlag() throws JsonProcessingException, IOException {
 		query = "$deutscher";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -103,7 +102,7 @@
 	}
 
 	@Test
-	public void testMORPH() throws QueryException, JsonProcessingException, IOException {
+	public void testMORPH() throws JsonProcessingException, IOException {
 		query = "MORPH(p=V)";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -159,7 +158,7 @@
 	}
 
 	@Test
-	public void testSequence() throws QueryException, JsonProcessingException, IOException {
+	public void testSequence() throws JsonProcessingException, IOException {
 		query = "der Mann";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -224,7 +223,7 @@
 	}
 
 	@Test
-	public void testOPOR() throws QueryException, JsonProcessingException, IOException {
+	public void testOPOR() throws JsonProcessingException, IOException {
 		query = "Sonne oder Mond";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -263,7 +262,7 @@
 	}
 
 	@Test
-	public void testOPORAND() throws QueryException, JsonProcessingException, IOException {
+	public void testOPORAND() throws JsonProcessingException, IOException {
 		query = "(Sonne oder Mond) und scheint";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -321,7 +320,7 @@
 	}
 
 	@Test
-	public void testOPNOT() throws QueryException, JsonProcessingException, IOException {
+	public void testOPNOT() throws JsonProcessingException, IOException {
 		query = "Sonne nicht Mond";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -382,7 +381,7 @@
 	}
 
 	@Test
-	public void testOPPROX() throws QueryException, JsonProcessingException, IOException {
+	public void testOPPROX() throws JsonProcessingException, IOException {
 		query = "Sonne /+w1:4 Mond";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -461,7 +460,7 @@
 	}
 
 	@Test
-	public void testOPPROXNested() throws QueryException, JsonProcessingException, IOException {	
+	public void testOPPROXNested() throws JsonProcessingException, IOException {	
 		query = "Sonne /+w1:4 Mond /+w1:7 Sterne";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -532,7 +531,7 @@
 	}
 
 	@Test
-	public void testOPIN() throws QueryException, JsonProcessingException, IOException {
+	public void testOPIN() throws JsonProcessingException, IOException {
 		query = "wegen #IN <s>";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -613,7 +612,7 @@
 	}
 
 	@Test
-	public void testOPOV() throws QueryException, JsonProcessingException, IOException {
+	public void testOPOV() throws JsonProcessingException, IOException {
 		query = "wegen #OV <s>";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -668,7 +667,7 @@
 
 
 	@Test
-	public void testBEG_END() throws QueryException, JsonProcessingException, IOException {
+	public void testBEG_END() throws JsonProcessingException, IOException {
 		query = "#BEG(der /w3:5 Mann)";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -777,7 +776,7 @@
 	}
 
 	@Test
-	public void testELEM() throws QueryException, JsonProcessingException, IOException {
+	public void testELEM() throws JsonProcessingException, IOException {
 		query = "#ELEM(S)";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -847,7 +846,7 @@
 		assertEquals("match:ne",		res.at("/query/attr/operands/1/operands/1/match").asText());
 	}
 	@Test
-	public void testOPALL() throws QueryException, JsonProcessingException, IOException {
+	public void testOPALL() throws JsonProcessingException, IOException {
 		query = "#ALL(gehen /w1:10 voran)";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -883,7 +882,7 @@
 	}
 
 	@Test
-	public void testOPNHIT() throws QueryException, JsonProcessingException, IOException {
+	public void testOPNHIT() throws JsonProcessingException, IOException {
 		query = "#NHIT(gehen /w1:10 voran)";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -944,7 +943,7 @@
 	}
 
 	@Test
-	public void testOPBED() throws QueryException, JsonProcessingException, IOException {
+	public void testOPBED() throws JsonProcessingException, IOException {
 		query = "#BED(der , sa)";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
@@ -1026,7 +1025,7 @@
 	}
 
 	@Test
-	public void testColonSeparatedConditions() throws QueryException, JsonProcessingException, IOException {
+	public void testColonSeparatedConditions() throws JsonProcessingException, IOException {
 		query = "der:sa";
 		qs.setQuery(query, "cosmas2");
 		res = mapper.readTree(qs.toJSON());
diff --git a/src/test/java/CqlQueryProcessorTest.java b/src/test/java/CqlQueryProcessorTest.java
index 7fd62ae..91917f1 100644
--- a/src/test/java/CqlQueryProcessorTest.java
+++ b/src/test/java/CqlQueryProcessorTest.java
@@ -10,7 +10,6 @@
 
 import de.ids_mannheim.korap.query.serialize.CqlQueryProcessor;
 import de.ids_mannheim.korap.query.serialize.Cosmas2QueryProcessor;
-import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
 
 public class CqlQueryProcessorTest {
@@ -24,7 +23,7 @@
 		query = "(Kuh) prox (Germ) ";
 		try {
 			CqlQueryProcessor cqlTree = new CqlQueryProcessor(query, version);
-		} catch (QueryException e) {
+		} catch (Exception e) {
 			int errorCode = Integer.parseInt(e.getMessage().split(":")[0].replace("SRU diagnostic ", ""));
 			assertEquals(48,errorCode);
 		}
@@ -32,7 +31,7 @@
 		query = "(Kuh) or/rel.combine=sum (Germ) ";		
 		try {
 			CqlQueryProcessor cqlTree = new CqlQueryProcessor(query, version);
-		}catch (QueryException e) {			
+		}catch (Exception e) {			
 			int errorCode = Integer.parseInt(e.getMessage().split(":")[0].replace("SRU diagnostic ", ""));
 			assertEquals(20,errorCode);
 		}
@@ -40,7 +39,7 @@
 		query = "dc.title any Germ ";
 		try {
 			CqlQueryProcessor cqlTree = new CqlQueryProcessor(query, version);
-		} catch (QueryException e) {
+		} catch (Exception e) {
 			int errorCode = Integer.parseInt(e.getMessage().split(":")[0].replace("SRU diagnostic ", ""));
 			assertEquals(16,errorCode);
 		}
@@ -48,7 +47,7 @@
 		query = "cql.serverChoice any Germ ";
 		try {
 			CqlQueryProcessor cqlTree = new CqlQueryProcessor(query, version);
-		} catch (QueryException e) {
+		} catch (Exception e) {
 			int errorCode = Integer.parseInt(e.getMessage().split(":")[0].replace("SRU diagnostic ", ""));
 			assertEquals(19,errorCode);
 		}
@@ -56,14 +55,14 @@
 		query = "";
 		try {
 			CqlQueryProcessor cqlTree = new CqlQueryProcessor(query, version);
-		} catch (QueryException e) {
+		} catch (Exception e) {
 			int errorCode = Integer.parseInt(e.getMessage().split(":")[0].replace("SRU diagnostic ", ""));
 			assertEquals(27,errorCode);
 		}
 	}
 	
 	@Test
-	public void testAndQuery() throws CQLParseException, IOException, QueryException{
+	public void testAndQuery() throws CQLParseException, IOException, Exception{
 		query="(Sonne) and (scheint)";	
 		String jsonLd = 
 			"{@type : korap:group, operation : operation:sequence, inOrder : false," +		
@@ -84,7 +83,7 @@
 	}
 	
 	@Test
-	public void testBooleanQuery() throws CQLParseException, IOException, QueryException{		
+	public void testBooleanQuery() throws CQLParseException, IOException, Exception{		
 		query="((Sonne) or (Mond)) and (scheint)";		
 		String jsonLd = 
 			"{@type:korap:group, operation:operation:sequence, inOrder : false, distances:[" +
@@ -119,7 +118,7 @@
 	}
 	
 	@Test
-	public void testOrQuery() throws CQLParseException, IOException, QueryException{
+	public void testOrQuery() throws CQLParseException, IOException, Exception{
 		query = "(Sonne) or (Mond)";		
 		String jsonLd = 
 			"{@type:korap:group, operation:operation:or, operands:[" +
@@ -163,7 +162,7 @@
 	}
 	
 	@Test
-	public void testTermQuery() throws CQLParseException, IOException, QueryException{
+	public void testTermQuery() throws CQLParseException, IOException, Exception{
 		query = "Sonne";		
 		String jsonLd = "{@type:korap:token, wrap:{@type:korap:term, key:Sonne, layer:orth, match:match:eq}}";		
 		CqlQueryProcessor cqlTree = new CqlQueryProcessor(query, version);		
@@ -172,7 +171,7 @@
 	}
 	
 	@Test
-	public void testPhraseQuery() throws CQLParseException, IOException, QueryException{
+	public void testPhraseQuery() throws CQLParseException, IOException, Exception{
 		query="\"der Mann\"";				
 		String jsonLd = 
 			"{@type:korap:group, operation:operation:sequence, operands:[" +
diff --git a/src/test/java/PoliqarpPlusQueryProcessorTest.java b/src/test/java/PoliqarpPlusQueryProcessorTest.java
index d5b5890..309bdc6 100644
--- a/src/test/java/PoliqarpPlusQueryProcessorTest.java
+++ b/src/test/java/PoliqarpPlusQueryProcessorTest.java
@@ -12,7 +12,6 @@
 
 
 import de.ids_mannheim.korap.query.serialize.QuerySerializer;
-import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
 /**
  * Tests for JSON-LD serialization of PoliqarpPlus queries. 
@@ -29,7 +28,7 @@
 	JsonNode res;
 
 	@Test
-	public void testContext() throws QueryException, JsonProcessingException, IOException {
+	public void testContext() throws JsonProcessingException, IOException {
 		query = "foo";
 		String contextString = "http://ids-mannheim.de/ns/KorAP/json-ld/v0.2/context.jsonld";
 		qs.setQuery(query, "poliqarpplus");
@@ -38,7 +37,7 @@
 	}
 
 	@Test
-	public void testSingleTokens() throws QueryException, JsonProcessingException, IOException {
+	public void testSingleTokens() throws JsonProcessingException, IOException {
 		query = "[base=Mann]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -82,7 +81,7 @@
 	}
 
 	@Test
-	public void testValue() throws QueryException, JsonProcessingException, IOException {
+	public void testValue() throws JsonProcessingException, IOException {
 		query = "[mate/m=temp:pres]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -96,7 +95,7 @@
 	}
 
 	@Test
-	public void testRegex() throws QueryException, JsonProcessingException, IOException {
+	public void testRegex() throws JsonProcessingException, IOException {
 		query = "[orth=\"M(a|ä)nn(er)?\"]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -140,7 +139,7 @@
 	}
 
 	@Test
-	public void testCaseSensitivityFlag() throws QueryException, JsonProcessingException, IOException {
+	public void testCaseSensitivityFlag() throws JsonProcessingException, IOException {
 		query = "[orth=deutscher/i]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -207,7 +206,7 @@
 	}
 
 	@Test
-	public void testSpans() throws QueryException, JsonProcessingException, IOException {
+	public void testSpans() throws JsonProcessingException, IOException {
 		query = "<s>";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -283,7 +282,7 @@
 	}
 
 	@Test
-	public void testDistances() throws QueryException, JsonProcessingException, IOException {
+	public void testDistances() throws JsonProcessingException, IOException {
 		query = "[base=der][][base=Mann]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -439,7 +438,7 @@
 	}
 
 	@Test
-	public void testDistancesWithClass() throws QueryException, JsonProcessingException, IOException {
+	public void testDistancesWithClass() throws JsonProcessingException, IOException {
 		query = "[base=der]{[]}[base=Mann]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -482,7 +481,7 @@
 	}
 
 	@Test
-	public void testLeadingTrailingEmptyTokens() throws QueryException, JsonProcessingException, IOException {
+	public void testLeadingTrailingEmptyTokens() throws JsonProcessingException, IOException {
 		query = "[][base=Mann]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -512,7 +511,7 @@
 	}
 
 	@Test
-	public void testRepetition() throws QueryException, JsonProcessingException, IOException {
+	public void testRepetition() throws JsonProcessingException, IOException {
 		query = "der{3}";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -554,7 +553,7 @@
 	};
 
 	@Test
-	public void testGroupRepetition() throws QueryException, JsonProcessingException, IOException {
+	public void testGroupRepetition() throws JsonProcessingException, IOException {
 		query = "contains(<s>, (der){3})";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -591,7 +590,7 @@
 	};
 
 	@Test
-	public void testPositions() throws QueryException, JsonProcessingException, IOException {
+	public void testPositions() throws JsonProcessingException, IOException {
 		query = "contains(<s>, der)";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -674,7 +673,7 @@
 	};
 
 	@Test
-	public void testCoordinatedFields() throws QueryException, JsonProcessingException, IOException {
+	public void testCoordinatedFields() throws JsonProcessingException, IOException {
 		query = "[base=Mann&(cas=N|cas=A)]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -705,7 +704,7 @@
 	}
 
 	@Test
-	public void testTokenSequence() throws QueryException, JsonProcessingException, IOException {
+	public void testTokenSequence() throws JsonProcessingException, IOException {
 		query = "[base=Mann][orth=Frau]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -730,7 +729,7 @@
 	}
 
 	@Test
-	public void testDisjSegments() throws QueryException, JsonProcessingException, IOException {
+	public void testDisjSegments() throws JsonProcessingException, IOException {
 		query = "[base=der]|[base=das]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -822,7 +821,7 @@
 	}
 
 	@Test
-	public void testTokenSpanSequence() throws QueryException, JsonProcessingException, IOException {
+	public void testTokenSpanSequence() throws JsonProcessingException, IOException {
 		query = "[base=Mann]<vp>";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -857,7 +856,7 @@
 	}
 
 	@Test 
-	public void testClasses() throws QueryException, JsonProcessingException, IOException {
+	public void testClasses() throws JsonProcessingException, IOException {
 		query = "{[base=Mann]}";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -912,7 +911,7 @@
 	}
 
 	@Test
-	public void testFocusSplit() throws QueryException, JsonProcessingException, IOException {
+	public void testFocusSplit() throws JsonProcessingException, IOException {
 		query = "focus([orth=Der]{[orth=Mann]})";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -997,7 +996,7 @@
 	}
 
 	@Test
-	public void testSubmatch() throws QueryException, JsonProcessingException, IOException {
+	public void testSubmatch() throws JsonProcessingException, IOException {
 		query = "submatch(1,:<s>)";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -1027,7 +1026,7 @@
 		assertEquals("Haus", 				res.at("/query/operands/0/operands/1/wrap/key").asText());
 	}
 	@Test
-	public void testRelations() throws QueryException, JsonProcessingException, IOException {
+	public void testRelations() throws JsonProcessingException, IOException {
 		query = "relatesTo(<s>,<np>)";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -1104,7 +1103,7 @@
 	}
 
 	@Test
-	public void testAlign() throws QueryException, JsonProcessingException, IOException {
+	public void testAlign() throws JsonProcessingException, IOException {
 		query = "[orth=der]^[orth=Mann]";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());
@@ -1157,7 +1156,7 @@
 	}
 
 	@Test
-	public void testSimpleQueries() throws QueryException, JsonProcessingException, IOException {
+	public void testSimpleQueries() throws JsonProcessingException, IOException {
 		query = "Baum";
 		qs.setQuery(query, "poliqarpplus");
 		res = mapper.readTree(qs.toJSON());