Externalised CQLF object generation: the factory methods now live in the util class CqlfObjectGenerator instead of the abstract superclass AbstractQueryProcessor ("composition over inheritance")
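
The new util class itself is not part of this excerpt. As a rough sketch (an assumption, reconstructed from the call sites below and from the factory methods removed from AbstractQueryProcessor), it exposes the former protected make*/wrapIn* helpers as static methods and takes a back-reference to the active query processor via setQueryProcessor(), presumably so that warnings and deprecation messages raised while building objects can still reach the running processor:

    package de.ids_mannheim.korap.query.serialize.util;

    import java.util.ArrayList;
    import java.util.LinkedHashMap;

    import de.ids_mannheim.korap.query.serialize.AbstractQueryProcessor;

    // Sketch only: the real class is not shown in this diff excerpt.
    public class CqlfObjectGenerator {

        // Back-reference to the processor currently in use, registered by each
        // processor constructor (see the setQueryProcessor(this) calls below);
        // assumed to be the channel for deprecation/warning messages.
        private static AbstractQueryProcessor qp;

        public static void setQueryProcessor(AbstractQueryProcessor processor) {
            qp = processor;
        }

        // The former instance helpers become static factories, e.g.:
        public static LinkedHashMap<String, Object> makeSpan() {
            LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
            span.put("@type", "korap:span");
            return span;
        }

        public static LinkedHashMap<String, Object> makeGroup(String operation) {
            LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
            group.put("@type", "korap:group");
            group.put("operation", "operation:" + operation);
            group.put("operands", new ArrayList<Object>());
            return group;
        }

        // ... the remaining make*/wrapIn*/crop* helpers move over analogously.
    }
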
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java
index 2b899ab..5e3ef4f 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/AbstractQueryProcessor.java
@@ -17,8 +17,6 @@
 	
 	public abstract void process(String query) throws QueryException;
 	
-	public static final Integer MAXIMUM_DISTANCE = 100; 
-
 	Logger log;
 	/**
 	 *  The query
@@ -97,300 +95,4 @@
 	public Map<String, Object> getRequestMap() {
 		return requestMap;
 	}
-	
-	protected LinkedHashMap<String, Object> makeSpan() {
-		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
-		span.put("@type", "korap:span");
-		return span;
-	}
-	
-	protected LinkedHashMap<String, Object> makeSpan(String key) {
-		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
-		span.put("@type", "korap:span");
-		span.put("key", key);
-		return span;
-	}
-	
-	protected LinkedHashMap<String, Object> makeTerm() {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:term");
-		return term;
-	}
-	
-	protected LinkedHashMap<String, Object> makeTermGroup(String relation) {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:termGroup");
-		term.put("relation", "relation:"+relation);
-		term.put("operands", new ArrayList<Object>());
-		return term;
-	}
-	
-	protected LinkedHashMap<String, Object> makeDoc() {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:doc");
-		return term;
-	}
-	
-	protected LinkedHashMap<String, Object> makeDocGroup(String relation) {
-		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
-		term.put("@type", "korap:docGroup");
-		term.put("operation", "operation:"+relation);
-		term.put("operands", new ArrayList<Object>());
-		return term;
-	}
-	
-	protected LinkedHashMap<String, Object> makeToken() {
-		LinkedHashMap<String, Object> token = new LinkedHashMap<String, Object>();
-		token.put("@type", "korap:token");
-		return token;
-	}
-	
-	protected LinkedHashMap<String, Object> makeGroup(String operation) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:"+operation);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeRepetition(Integer min, Integer max) {
-		LinkedHashMap<String, Object> group = makeGroup("repetition");
-		group.put("boundary", makeBoundary(min, max));
-		group.put("min", min);
-		if (max != null) {
-			group.put("max", max);
-		}
-		addMessage(303, "Deprecated 2014-07-24: 'min' and 'max' to be supported until 3 months from deprecation date.");
-		return group;
-	}
-	
-	@Deprecated
-	protected LinkedHashMap<String, Object> makePosition(String frame) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:position");
-		group.put("frame", "frame:"+frame);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makePosition(String[] allowedFrames, String[] classRefCheck) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:position");
-		group.put("frames", Arrays.asList(allowedFrames));
-		group.put("operands", new ArrayList<Object>());
-		// DEPRECATED 'frame'
-		if (classRefCheck == null || classRefCheck.length==0) classRefCheck = new String[]{"classRefCheck:includes"};
-		String frame = "";
-		
-		if (allowedFrames.length==0 && classRefCheck[0]=="classRefCheck:includes") {
-			frame = "frame:contains";
-		} else if (allowedFrames[0]=="frames:overlapsLeft" && allowedFrames[1]=="frames:overlapsRight" && classRefCheck[0]=="classRefCheck:intersects") {
-			frame = "frame:overlaps";
-		} else if (allowedFrames[0]=="frames:startswith" && classRefCheck[0]=="classRefCheck:includes") {
-			frame = "frame:startswith";
-		} else if (allowedFrames[0]=="frames:endswith" && classRefCheck[0]=="classRefCheck:includes") {
-			frame = "frame:endswith";
-		} else if (allowedFrames[0]=="frames:matches" && classRefCheck[0]=="classRefCheck:includes" && classRefCheck.length==1) {
-			frame = "frame:matches";
-		} else if (allowedFrames[0]=="frames:matches" && classRefCheck[0]=="classRefCheck:includes" && classRefCheck[1]=="classRefCheck:unequals") {
-			frame = "frame:matches";
-		} else if (allowedFrames[0]=="frames:matches" && classRefCheck[0]=="classRefCheck:equals") {
-			frame = "frame:matches";			
-		} else if (allowedFrames[0]=="frames:contains" && classRefCheck[0]=="classRefCheck:includes") {
-			frame = "frame:contains";
-		} else if (allowedFrames[0]=="frames:startswith" && classRefCheck[0]=="classRefCheck:intersects") {
-			frame = "frame:overlapsLeft";
-		} else if (allowedFrames[0]=="frames:endswith" && classRefCheck[0]=="classRefCheck:intersects") {
-			frame = "frame:overlapsRight";
-		} else if (allowedFrames[0]=="frames:matches" && classRefCheck[0]=="classRefCheck:intersects") {
-			frame = "frame:matches";
-		} else if (allowedFrames[0]=="frames:matches" && classRefCheck[0]=="classRefCheck:unequals") {
-			frame = "frame:matches";
-		} else if (allowedFrames[0]=="frames:matches" && classRefCheck[0]=="classRefCheck:equals") {
-			frame = "frame:matches";
-		} else if (allowedFrames[0]=="frames:contains" && classRefCheck[0]=="classRefCheck:intersects") {
-			frame = "frame:contains";
-		}
-		group.put("frame", frame);
-		addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-09-22: 'frame' only to be supported until 3 months from deprecation date. " +
-				"Position frames are now expressed through 'frames'.");
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeSpanClass(int classCount) {
-		return makeSpanClass(classCount, true);
-	}
-	
-	protected LinkedHashMap<String, Object> makeSpanClass(int classCount, boolean setBySystem) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:class");
-		if (setBySystem) {
-			group.put("class", 128+classCount);
-			group.put("classOut", 128+classCount);
-			addMessage("A class has been introduced into the backend representation of " +
-					"your query for later reference to a part of the query. The class id is "+(128+classCount));
-		} else {
-			group.put("class", classCount);
-			group.put("classOut", classCount);
-		}
-		addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. " +
-				"Classes are now defined using the 'classOut' attribute.");
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeClassRefCheck(ArrayList<String> check, Integer[] classIn, int classOut) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:class");
-		group.put("classRefCheck", check);
-		group.put("classIn", Arrays.asList(classIn));
-		group.put("classOut", classOut);
-		group.put("class", classOut);
-		addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. " +
-				"Classes are now defined using the 'classOut' attribute.");
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeClassRefOp(String operation, Integer[] classIn, int classOut) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:group");
-		group.put("operation", "operation:class");
-		group.put("classRefOp", operation);
-		group.put("classIn", Arrays.asList(classIn));
-		group.put("classOut", classOut);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	@Deprecated
-	protected LinkedHashMap<String, Object> makeTreeRelation(String reltype) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:treeRelation");
-		if (reltype != null) group.put("reltype", reltype);
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeRelation() {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:relation");
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeBoundary(Integer min, Integer max) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:boundary");
-		group.put("min", min);
-		if (max != null) {
-			group.put("max", max);
-		}
-		return group;
-	}
-
-	protected LinkedHashMap<String, Object> makeDistance(String key, Integer min, Integer max) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		if (key.equals("w")) {
-			group.put("@type", "korap:distance");
-		} else {
-			group.put("@type", "cosmas:distance");
-		}
-		group.put("key", key);
-		group.put("boundary", makeBoundary(min, max));
-		group.put("min", min);
-		if (max != null) {
-			group.put("max", max);
-		}
-		addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-07-24: 'min' and 'max' to be supported until 3 months from deprecation date.");
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeReference(ArrayList<Integer> classRefs, String operation) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:reference");
-		group.put("operation", "operation:"+operation);
-		if (classRefs!= null && !classRefs.isEmpty()) {
-			group.put("classRef", classRefs);
-		}
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeReference(ArrayList<Integer> classRefs) {
-		return makeReference(classRefs, "focus");
-	}
-	
-	protected LinkedHashMap<String, Object> makeReference(int classRef, String operation, boolean setBySystem) {
-		ArrayList<Integer> classRefs = new ArrayList<Integer>();
-		if (setBySystem) classRef = classRef+128;
-		classRefs.add(classRef);
-		return makeReference(classRefs, operation);
-	}
-	
-	protected LinkedHashMap<String, Object> makeReference(int classRef, boolean setBySystem) {
-		ArrayList<Integer> classRefs = new ArrayList<Integer>();
-		if (setBySystem) classRef = classRef+128;
-		classRefs.add(classRef);
-		return makeReference(classRefs, "focus");
-	}
-	
-	protected LinkedHashMap<String, Object> makeReference(int classRef) {
-		return makeReference(classRef, false);
-	}
-	
-	protected LinkedHashMap<String, Object> makeResetReference() {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:reference");
-		group.put("operation", "operation:focus");
-		group.put("reset", true);
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	protected LinkedHashMap<String, Object> makeSpanReference(Integer[] spanRef, String operation) {
-		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
-		group.put("@type", "korap:reference");
-		group.put("operation", "operation:"+operation);
-		group.put("spanRef", Arrays.asList(spanRef));
-		group.put("operands", new ArrayList<Object>());
-		return group;
-	}
-	
-	protected void addOperandsToGroup(LinkedHashMap<String, Object> group) {
-		ArrayList<Object> operands = new ArrayList<Object>();
-		group.put("operands", operands);
-	}
-	
-	protected LinkedHashMap<String, Object> wrapInReference(LinkedHashMap<String, Object> group, Integer classId) {
-		LinkedHashMap<String, Object> refGroup = makeReference(classId);
-		ArrayList<Object> operands = new ArrayList<Object>();
-		operands.add(group);
-		refGroup.put("operands", operands);
-		return refGroup;
-	}
-
-	@SuppressWarnings("unchecked")
-	protected LinkedHashMap<String, Object> wrapInClass(LinkedHashMap<String, Object> group, Integer classId) {
-		LinkedHashMap<String, Object> classGroup = makeSpanClass(classId, true);
-		((ArrayList<Object>) classGroup.get("operands")).add(group);
-		return classGroup;
-	}
-	
-	/**
-	 * Ensures that a distance or quantification value does not exceed the allowed maximum value. 
-	 * @param number
-	 * @return The input number if it is below the allowed maximum value, else the maximum value. 
-	 */
-	protected int cropToMaxValue(int number) {
-		if (number > MAXIMUM_DISTANCE) {
-			number = MAXIMUM_DISTANCE; 
-			String warning = String.format("You specified a distance between two segments that is greater than " +
-					"the allowed max value of %d. Your query will be re-interpreted using a distance of %d.", MAXIMUM_DISTANCE, MAXIMUM_DISTANCE);
-			addWarning(warning);
-			log.warn("User warning: "+warning);
-		}
-		return number;
-	}
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java
index 8f89546..07fe43e 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/AnnisQueryProcessor.java
@@ -23,6 +23,7 @@
 import de.ids_mannheim.korap.query.parse.annis.AqlLexer;
 import de.ids_mannheim.korap.query.parse.annis.AqlParser;
 import de.ids_mannheim.korap.query.serialize.util.Antlr4DescriptiveErrorListener;
+import de.ids_mannheim.korap.query.serialize.util.CqlfObjectGenerator;
 import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
 /**
@@ -70,7 +71,6 @@
 	 * but are to be integrated into the AqlTree at a later point (namely as operands of the respective group). Therefore, store references to these
 	 * nodes here and exclude the operands from being written into the query map individually.   
 	 */
-	private LinkedList<String> operandOnlyNodeRefs = new LinkedList<String>();
 	private List<ParseTree> globalLingTermNodes = new ArrayList<ParseTree>();
 	private int totalRelationCount;
 	/**
@@ -87,6 +87,7 @@
 	 * @param parser The ANTLR parser instance that generated the parse tree
 	 */
 	public AnnisQueryProcessor(String query) {
+		CqlfObjectGenerator.setQueryProcessor(this);
 		try {
 			process(query);
 		} catch (QueryException e) {
@@ -227,7 +228,7 @@
 	private void processExprTop(ParseTree node) {
 		List<ParseTree> andTopExprs = getChildrenWithCat(node, "andTopExpr");
 		if (andTopExprs.size() > 1) {
-			LinkedHashMap<String, Object> topOr = makeGroup("or");
+			LinkedHashMap<String, Object> topOr = CqlfObjectGenerator.makeGroup("or");
 			requestMap.put("query", topOr);
 			objectStack.push(topOr);
 		}
@@ -238,11 +239,11 @@
 		String firstChildNodeCat = getNodeCat(node.getChild(0));
 		LinkedHashMap<String, Object> object = null;
 		if (firstChildNodeCat.equals("node")) {
-			object = makeSpan();
+			object = CqlfObjectGenerator.makeSpan();
 		} else if (firstChildNodeCat.equals("tok")) {
-			object = makeToken();
+			object = CqlfObjectGenerator.makeToken();
 			if (node.getChildCount() > 1) { // empty tokens do not wrap a term
-				LinkedHashMap<String, Object> term = makeTerm();
+				LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
 				term.put("layer", "orth");
 				object.put("wrap", term);
 			}
@@ -251,17 +252,17 @@
 			// TODO generalize the list below -> look up layers associated with tokens rather than spans somewhere
 			HashMap<String, Object> qNameParse = parseQNameNode(node.getChild(0));
 			if (Arrays.asList(new String[]{"p", "lemma", "m", "orth"}).contains(qNameParse.get("layer"))) { 
-				object = makeToken();
-				LinkedHashMap<String, Object> term = makeTerm();
+				object = CqlfObjectGenerator.makeToken();
+				LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
 				object.put("wrap", term);
 				term.putAll(qNameParse);
 			} else {
-				object = makeSpan();
+				object = CqlfObjectGenerator.makeSpan();
 				object.putAll(qNameParse);
 			}
 		} else if (firstChildNodeCat.equals("textSpec")) {
-			object = makeToken();
-			LinkedHashMap<String, Object> term = makeTerm();
+			object = CqlfObjectGenerator.makeToken();
+			LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
 			object.put("wrap", term);
 			term.put("layer", "orth");
 			term.putAll(parseTextSpec(node.getChild(0)));
@@ -312,12 +313,12 @@
 			if (nodeReferencesTotal.get(ref) > 1) {
 				if (nodeReferencesProcessed.get(ref)==0) {
 					refClassMapping.put(ref, classCounter);
-					operand = wrapInClass(operand, classCounter++);
+					operand = CqlfObjectGenerator.wrapInClass(operand, classCounter++);
 				} else if (nodeReferencesProcessed.get(ref)>0 && nodeReferencesTotal.get(ref)>1) {
 					try {
-						operand = wrapInReference(operandStack.pop(), refClassMapping.get(ref));
+						operand = CqlfObjectGenerator.wrapInReference(operandStack.pop(), refClassMapping.get(ref));
 					} catch (NoSuchElementException e) {
-						operand = makeReference(refClassMapping.get(ref));
+						operand = CqlfObjectGenerator.makeReference(refClassMapping.get(ref));
 					}
 				}
 				nodeReferencesProcessed.put(ref, nodeReferencesProcessed.get(ref)+1);
@@ -351,14 +352,14 @@
 			// This is modeled here...
 			if (reltype.equals("commonparent") || reltype.equals("commonancestor")) {
 				// make an (outer) group and an inner group containing the dummy node or previous relations
-				group = makeGroup("relation");
-				LinkedHashMap<String,Object> innerGroup = makeGroup("relation");
-				LinkedHashMap<String,Object> relation = makeRelation();
-				LinkedHashMap<String,Object> term = makeTerm();
+				group = CqlfObjectGenerator.makeGroup("relation");
+				LinkedHashMap<String,Object> innerGroup = CqlfObjectGenerator.makeGroup("relation");
+				LinkedHashMap<String,Object> relation = CqlfObjectGenerator.makeRelation();
+				LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 				term.put("layer", "c");
 				relation.put("wrap", term);
 				// commonancestor is an indirect commonparent relation
-				if (reltype.equals("commonancestor")) relation.put("boundary", makeBoundary(1, null));
+				if (reltype.equals("commonancestor")) relation.put("boundary", CqlfObjectGenerator.makeBoundary(1, null));
 				group.put("relation", relation);
 				innerGroup.put("relation", relation);
 				// Get operands list before possible re-assignment of 'group' (see following 'if')
@@ -366,23 +367,23 @@
 				ArrayList<Object> innerOperands  = (ArrayList<Object>) innerGroup.get("operands");
 				// for lowest level, add the underspecified node as first operand and wrap it in a class group
 				if (i == 1) {
-					innerOperands.add(wrapInClass(makeSpan(), classCounter));
+					innerOperands.add(CqlfObjectGenerator.wrapInClass(CqlfObjectGenerator.makeSpan(), classCounter));
 					// add the first operand and wrap the whole group in a focusing reference 
 					innerOperands.add(operand1);
-					innerGroup = wrapInReference(innerGroup, classCounter);
+					innerGroup = CqlfObjectGenerator.wrapInReference(innerGroup, classCounter);
 					outerOperands.add(innerGroup);
 				} else {
 					outerOperands.add(operandStack.pop());
 				}
 				// Lookahead: if next operator is not commonparent or commonancestor, wrap in class for accessibility
 				if (i < node.getChildCount()-2 && !getNodeCat(node.getChild(i+2).getChild(0)).startsWith("common")) {
-					operand2 = wrapInClass(operand2, ++classCounter);
+					operand2 = CqlfObjectGenerator.wrapInClass(operand2, ++classCounter);
 				}
 				outerOperands.add(operand2);
 
 				// Wrap in another reference object in case other relations are following
 				if (i < node.getChildCount()-2) {
-					group = wrapInReference(group, classCounter);
+					group = CqlfObjectGenerator.wrapInReference(group, classCounter);
 				}
 				// All other n-ary linguistic relations have special 'relation' attributes defined in CQLF and can be
 				// handled more easily...
@@ -395,12 +396,12 @@
 					groupType = "relation";
 				}
 				if (groupType.equals("relation") || groupType.equals("treeRelation")) {
-					group = makeGroup(groupType);
+					group = CqlfObjectGenerator.makeGroup(groupType);
 					LinkedHashMap<String, Object> relation = new LinkedHashMap<String, Object>();
 					putAllButGroupType(relation, operatorGroup);
 					group.put("relation", relation);
 				} else if (groupType.equals("sequence")) {
-					group = makeGroup(groupType);
+					group = CqlfObjectGenerator.makeGroup(groupType);
 					putAllButGroupType(group, operatorGroup);
 				} else if (groupType.equals("position")) {
 					group = new LinkedHashMap<String,Object>();
@@ -414,16 +415,16 @@
 				ParseTree rightChildSpec = getFirstChildWithCat(node.getChild(i).getChild(0), "@r");
 				if (leftChildSpec != null || rightChildSpec != null) {
 					String frame = (leftChildSpec!=null) ? "frames:startswith" : "frames:endswith";
-					LinkedHashMap<String,Object> positionGroup = makePosition(new String[]{frame}, null);
-					operand2 = wrapInClass(operand2, ++classCounter);
+					LinkedHashMap<String,Object> positionGroup = CqlfObjectGenerator.makePosition(new String[]{frame}, null);
+					operand2 = CqlfObjectGenerator.wrapInClass(operand2, ++classCounter);
 					((ArrayList<Object>) positionGroup.get("operands")).add(group);
-					((ArrayList<Object>) positionGroup.get("operands")).add(makeReference(classCounter,true));
+					((ArrayList<Object>) positionGroup.get("operands")).add(CqlfObjectGenerator.makeReference(classCounter,true));
 					group = positionGroup;
 				}
 				
 				// Wrap in reference object in case other relations are following
 				if (i < node.getChildCount()-2) {
-					group = wrapInReference(group, classCounter);
+					group = CqlfObjectGenerator.wrapInReference(group, classCounter);
 				}
 
 				// Inject operands.
@@ -439,14 +440,14 @@
 					if (i == 1) {
 						// for the first operator, include both operands
 						if (operand1 != null) operands.add(operand1);
-						if (operand2 != null) operands.add(wrapInClass(operand2, classCounter++));
+						if (operand2 != null) operands.add(CqlfObjectGenerator.wrapInClass(operand2, classCounter++));
 						// Don't put this into the super object directly but store on operandStack 
 						// (because this group will have to be an operand of a subsequent operator)
 						operandStack.push(group);
 						// for all subsequent operators, only take the 2nd operand (first was already added by previous operator)
 					} else if (i < node.getChildCount()-2) {
 						// for all intermediate operators, include other previous groups and 2nd operand. Store this on the operandStack, too.
-						if (operand2 != null) operands.add(wrapInClass(operand2, classCounter++));
+						if (operand2 != null) operands.add(CqlfObjectGenerator.wrapInClass(operand2, classCounter++));
 						operands.add(0, operandStack.pop());
 						operandStack.push(group);
 					} else if (i == node.getChildCount()-2) {
@@ -479,7 +480,7 @@
 	 */
 	private LinkedHashMap<String, Object> parseUnaryOperator(ParseTree node) {
 		LinkedHashMap<String, Object> attr = new LinkedHashMap<String, Object>();
-		LinkedHashMap<String, Object> term = makeTerm();
+		LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
 		String op = node.getChild(1).toStringTree(parser).substring(1);
 		if (op.equals("arity") || op.equals("tokenarity")) {
 			LinkedHashMap<String, Object> boundary = boundaryFromRangeSpec(node.getChild(3), false);
@@ -497,13 +498,13 @@
 		String operator = getNodeCat(operatorNode);
 		// DOMINANCE
 		if (operator.equals("dominance")) {
-			relation = makeRelation();
+			relation = CqlfObjectGenerator.makeRelation();
 			relation.put("groupType", "relation");
 			ParseTree qName = getFirstChildWithCat(operatorNode, "qName");
 			ParseTree edgeSpecNode = getFirstChildWithCat(operatorNode, "edgeSpec");
 			ParseTree star = getFirstChildWithCat(operatorNode, "*");
 			ParseTree rangeSpec = getFirstChildWithCat(operatorNode, "rangeSpec");
-			LinkedHashMap<String,Object> term = makeTerm();
+			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 			term.put("layer", "c");
 			if (qName != null) term = parseQNameNode(qName);
 			if (edgeSpecNode != null) {
@@ -513,31 +514,31 @@
 					((ArrayList<Object>) edgeSpec.get("operands")).add(term);
 					term = edgeSpec;
 				} else {
-					term = makeTermGroup("and");
+					term = CqlfObjectGenerator.makeTermGroup("and");
 					ArrayList<Object> termGroupOperands = (ArrayList<Object>) term.get("operands");
 					termGroupOperands.add(edgeSpec);
-					LinkedHashMap<String,Object> constTerm = makeTerm();
+					LinkedHashMap<String,Object> constTerm = CqlfObjectGenerator.makeTerm();
 					constTerm.put("layer", "c");
 					termGroupOperands.add(constTerm);
 				}
 			}
-			if (star != null) relation.put("boundary", makeBoundary(0, null));
+			if (star != null) relation.put("boundary", CqlfObjectGenerator.makeBoundary(0, null));
 			if (rangeSpec != null) relation.put("boundary", boundaryFromRangeSpec(rangeSpec));
 			relation.put("wrap", term);
 		}
 		else if (operator.equals("pointing")) {
 			//			String reltype = operatorNode.getChild(1).toStringTree(parser);
-			relation = makeRelation();
+			relation = CqlfObjectGenerator.makeRelation();
 			relation.put("groupType", "relation");
 			ParseTree qName = getFirstChildWithCat(operatorNode, "qName");
 			ParseTree edgeSpec = getFirstChildWithCat(operatorNode, "edgeSpec");
 			ParseTree star = getFirstChildWithCat(operatorNode, "*");
 			ParseTree rangeSpec = getFirstChildWithCat(operatorNode, "rangeSpec");
 			//			if (qName != null) relation.putAll(parseQNameNode(qName));
-			LinkedHashMap<String,Object> term = makeTerm();
+			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 			if (qName != null) term.putAll(parseQNameNode(qName));
 			if (edgeSpec != null) term.putAll(parseEdgeSpec(edgeSpec));
-			if (star != null) relation.put("boundary", makeBoundary(0, null));
+			if (star != null) relation.put("boundary", CqlfObjectGenerator.makeBoundary(0, null));
 			if (rangeSpec != null) relation.put("boundary", boundaryFromRangeSpec(rangeSpec));
 			relation.put("wrap", term);
 		}
@@ -548,7 +549,7 @@
 			ParseTree star = getFirstChildWithCat(operatorNode, "*");
 			ArrayList<Object> distances = new ArrayList<Object>();
 			if (star != null) {
-				distances.add(makeDistance("w", 0, null));
+				distances.add(CqlfObjectGenerator.makeDistance("w", 0, null));
 				relation.put("distances", distances);
 			}
 			if (rangeSpec != null) {
@@ -586,7 +587,7 @@
 			}
 //			relation.put("frames", frames);
 //			relation.put("sharedClasses", sharedClasses);
-			relation = makePosition(frames, new String[]{});
+			relation = CqlfObjectGenerator.makePosition(frames, new String[]{});
 			relation.put("groupType", "position");
 		}
 		else if (operator.equals("identity")) {
@@ -606,7 +607,7 @@
 		List<ParseTree> annos = getChildrenWithCat(edgeSpec, "edgeAnno");
 		if (annos.size() == 1) return parseEdgeAnno(annos.get(0));
 		else {
-			LinkedHashMap<String,Object> termGroup = makeTermGroup("and");
+			LinkedHashMap<String,Object> termGroup = CqlfObjectGenerator.makeTermGroup("and");
 			ArrayList<Object> operands = (ArrayList<Object>) termGroup.get("operands");
 			for (ParseTree anno : annos) {
 				operands.add(parseEdgeAnno(anno));
@@ -639,7 +640,7 @@
 		if (expandToMax) max = null;
 		if (rangeSpec.getChildCount()==3) 
 			max = Integer.parseInt(rangeSpec.getChild(2).toStringTree(parser));
-		return makeBoundary(min, max);
+		return CqlfObjectGenerator.makeBoundary(min, max);
 	}
 
 	private LinkedHashMap<String, Object> parseDistance(ParseTree rangeSpec) {
@@ -647,7 +648,7 @@
 		Integer max = null;
 		if (rangeSpec.getChildCount()==3) 
 			max = Integer.parseInt(rangeSpec.getChild(2).toStringTree(parser));
-		return makeDistance("w", min, max);
+		return CqlfObjectGenerator.makeDistance("w", min, max);
 	}
 
 	private LinkedHashMap<String, Object> parseTextSpec(ParseTree node) {
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java
index 33adaca..4f0775f 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr3AbstractQueryProcessor.java
@@ -10,7 +10,7 @@
 
 public abstract class Antlr3AbstractQueryProcessor extends AbstractQueryProcessor {
 	
-	public Parser parser;
+	protected Parser parser;
 
     /**
      * Returns the category (or 'label') of the root of a (sub-) ParseTree (ANTLR 3).
@@ -18,7 +18,7 @@
      * @param node
      * @return
      */
-    public static String getNodeCat(Tree node) {
+    protected static String getNodeCat(Tree node) {
         String nodeCat = node.toStringTree();
         Pattern p = Pattern.compile("\\((.*?)\\s"); // from opening parenthesis to 1st whitespace
         Matcher m = p.matcher(node.toStringTree());
@@ -35,7 +35,7 @@
      * @param childCat The category of the potential child.
      * @return true iff one or more children belong to the specified category
      */
-    public static boolean hasChild(Tree node, String childCat) {
+    protected static boolean hasChild(Tree node, String childCat) {
         for (int i = 0; i < node.getChildCount(); i++) {
             if (getNodeCat(node.getChild(i)).equals(childCat)) {
                 return true;
@@ -43,8 +43,21 @@
         }
         return false;
     }
+    
+    protected boolean hasDescendant(Tree node, String childCat) {
+        for (int i = 0; i < node.getChildCount(); i++) {
+            Tree child = node.getChild(i);
+            if (getNodeCat(child).equals(childCat)) {
+                return true;
+            }
+            if (hasDescendant(child, childCat)) {
+                return true;
+            }
+        }
+        return false;
+    }
 
-    public static List<Tree> getChildren(Tree node) {
+    protected static List<Tree> getChildren(Tree node) {
         ArrayList<Tree> children = new ArrayList<Tree>();
         for (int i = 0; i < node.getChildCount(); i++) {
             children.add(node.getChild(i));
@@ -52,7 +65,7 @@
         return children;
     }
 
-    public static List<Tree> getChildrenWithCat(Tree node, String nodeCat) {
+    protected static List<Tree> getChildrenWithCat(Tree node, String nodeCat) {
         ArrayList<Tree> children = new ArrayList<Tree>();
         for (int i = 0; i < node.getChildCount(); i++) {
             if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
@@ -62,12 +75,20 @@
         return children;
     }
     
-    public static Tree getFirstChildWithCat(Tree node, String nodeCat) {
-        for (int i = 0; i < node.getChildCount(); i++) {
-            if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
-                return node.getChild(i);
-            }
-        }
+    protected static Tree getFirstChildWithCat(Tree node, String nodeCat) {
+        return getNthChildWithCat(node, nodeCat, 1);
+    }
+    
+    protected static Tree getNthChildWithCat(Tree node, String nodeCat, int n) {
+    	int counter = 0;
+    	for (int i = 0; i < node.getChildCount(); i++) {
+    		if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
+    			counter++;
+    			if (counter == n) {
+    				return node.getChild(i);
+    			}
+    		}
+    	}
         return null;
     }
 }
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr4AbstractQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr4AbstractQueryProcessor.java
index da09f3c..6873678 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr4AbstractQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/Antlr4AbstractQueryProcessor.java
@@ -1,7 +1,6 @@
 package de.ids_mannheim.korap.query.serialize;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -22,7 +21,7 @@
      * @param node
      * @return
      */
-    public String getNodeCat(ParseTree node) {
+	protected String getNodeCat(ParseTree node) {
         String nodeCat = node.toStringTree(parser);
         Pattern p = Pattern.compile("\\((.*?)\\s"); // from opening parenthesis to 1st whitespace
         Matcher m = p.matcher(node.toStringTree(parser));
@@ -39,7 +38,7 @@
      * @param childCat The category of the potential child.
      * @return true iff one or more children belong to the specified category
      */
-    public boolean hasChild(ParseTree node, String childCat) {
+    protected boolean hasChild(ParseTree node, String childCat) {
         for (int i = 0; i < node.getChildCount(); i++) {
             if (getNodeCat(node.getChild(i)).equals(childCat)) {
                 return true;
@@ -48,7 +47,7 @@
         return false;
     }
 
-    public boolean hasDescendant(ParseTree node, String childCat) {
+    protected boolean hasDescendant(ParseTree node, String childCat) {
         for (int i = 0; i < node.getChildCount(); i++) {
             ParseTree child = node.getChild(i);
             if (getNodeCat(child).equals(childCat)) {
@@ -62,7 +61,7 @@
     }
     
 
-    public static List<ParseTree> getChildren(ParseTree node) {
+    protected static List<ParseTree> getChildren(ParseTree node) {
         ArrayList<ParseTree> children = new ArrayList<ParseTree>();
         for (int i = 0; i < node.getChildCount(); i++) {
                 children.add(node.getChild(i));
@@ -70,7 +69,7 @@
         return children;
     }
     
-    public List<ParseTree> getChildrenWithCat(ParseTree node, String nodeCat) {
+    protected List<ParseTree> getChildrenWithCat(ParseTree node, String nodeCat) {
         ArrayList<ParseTree> children = new ArrayList<ParseTree>();
         for (int i = 0; i < node.getChildCount(); i++) {
             if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
@@ -80,11 +79,11 @@
         return children;
     }
 
-    public ParseTree getFirstChildWithCat(ParseTree node, String nodeCat) {
+    protected ParseTree getFirstChildWithCat(ParseTree node, String nodeCat) {
         return getNthChildWithCat(node, nodeCat, 1);
     }
     
-    public ParseTree getNthChildWithCat(ParseTree node, String nodeCat, int n) {
+    protected ParseTree getNthChildWithCat(ParseTree node, String nodeCat, int n) {
     	int counter = 0;
     	for (int i = 0; i < node.getChildCount(); i++) {
     		if (getNodeCat(node.getChild(i)).equals(nodeCat)) {
@@ -96,23 +95,4 @@
     	}
         return null;
     }
-    
-    /**
-     * Checks whether a node only serves as a container for another node (e.g. in (cq_segment ( cg_seg_occ ...)), the cq_segment node does not contain
-     * any information and only contains the cq_seg_occ node.  
-     * @param node The node to check
-     * @return true iff the node is a container only.
-     */
-    public boolean isContainerOnly(ParseTree node) {
-    	String[] validNodeNamesArray = "cq_segment sq_segment element empty_segments spanclass".split(" ");
-    	List<String> validNodeNames = Arrays.asList(validNodeNamesArray);
-    	List<ParseTree> children = getChildren(node);
-    	for (ParseTree child : children) {
-    		if (validNodeNames.contains(getNodeCat(child))) {
-    			return false;
-    		}
-    	}
-    	return true;
-    }
-	
-}
+}
\ No newline at end of file
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java
index 932e34d..26e86e3 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryProcessor.java
@@ -3,6 +3,7 @@
 import de.ids_mannheim.korap.query.parse.collection.CollectionQueryLexer;
 import de.ids_mannheim.korap.query.parse.collection.CollectionQueryParser;
 import de.ids_mannheim.korap.query.serialize.util.Antlr4DescriptiveErrorListener;
+import de.ids_mannheim.korap.query.serialize.util.CqlfObjectGenerator;
 import de.ids_mannheim.korap.query.serialize.util.StatusCodes;
 import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
@@ -42,14 +43,17 @@
     Integer stackedObjects = 0;
     
     public CollectionQueryProcessor() {
+    	CqlfObjectGenerator.setQueryProcessor(this);
 	}
     
     public CollectionQueryProcessor(boolean verbose) {
+    	CqlfObjectGenerator.setQueryProcessor(this);
     	CollectionQueryProcessor.verbose = verbose;
 	}
     
     public CollectionQueryProcessor(String query) throws QueryException {
-		process(query);
+    	CqlfObjectGenerator.setQueryProcessor(this);
+    	process(query);
 	}
 
 	@Override
@@ -86,7 +90,7 @@
 
         if (nodeCat.equals("relation")) {
         	String operator = node.getChild(1).getChild(0).toStringTree(parser).equals("&") ? "and" : "or"; 
-            LinkedHashMap<String, Object> relationGroup = makeDocGroup(operator);
+            LinkedHashMap<String, Object> relationGroup = CqlfObjectGenerator.makeDocGroup(operator);
             putIntoSuperObject(relationGroup);
             objectStack.push(relationGroup);
             stackedObjects++;
@@ -97,7 +101,7 @@
             String field = fieldNode.getChild(0).toStringTree(parser);
             ParseTree operatorNode = getFirstChildWithCat(node, "operator");
             ParseTree valueNode = getFirstChildWithCat(node, "value");
-            LinkedHashMap<String, Object> term = makeDoc();
+            LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeDoc();
             term.put("key", field);
             term.putAll(parseValue(valueNode));
             String match = operatorNode.getText();
@@ -120,7 +124,7 @@
             ParseTree dateOpNode = getFirstChildWithCat(node, "dateOp");
             ParseTree dateNode = getFirstChildWithCat(node, "date");
 
-            LinkedHashMap<String, Object> term = makeDoc();
+            LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeDoc();
             term.put("key", field);
             term.putAll(parseValue(dateNode));
             String match = dateOpNode.getText();
@@ -133,7 +137,7 @@
         }
         
         if (nodeCat.equals("token")) {
-			LinkedHashMap<String,Object> token = makeToken();
+			LinkedHashMap<String,Object> token = CqlfObjectGenerator.makeToken();
 			// handle negation
 			List<ParseTree> negations = getChildrenWithCat(node, "!");
 			boolean negated = false;
@@ -141,7 +145,7 @@
 			if (negations.size() % 2 == 1) negated = true;
 			if (getNodeCat(node.getChild(0)).equals("key")) {
 				// no 'term' child, but direct key specification: process here
-				LinkedHashMap<String,Object> term = makeTerm();
+				LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 				String key = node.getChild(0).getText();
 				if (getNodeCat(node.getChild(0).getChild(0)).equals("regex")) {
 					isRegex = true;
@@ -365,7 +369,7 @@
 	private LinkedHashMap<String, Object> parseTermOrTermGroup(ParseTree node, boolean negatedGlobal, String mode) {
 		if (getNodeCat(node).equals("term")) {
 			String key = null;
-			LinkedHashMap<String,Object> term = makeTerm();
+			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 			// handle negation
 			boolean negated = negatedGlobal;
 			boolean isRegex = false;
@@ -433,7 +437,7 @@
 			// establish boolean relation
 			ParseTree boolOp = getFirstChildWithCat(node, "booleanOp"); 
 			String operator = boolOp.getText().equals("&") ? "and" : "or";
-			termGroup = makeTermGroup(operator);
+			termGroup = CqlfObjectGenerator.makeTermGroup(operator);
 			ArrayList<Object> operands = (ArrayList<Object>) termGroup.get("operands");
 			// recursion with left/right operands
 			operands.add(parseTermOrTermGroup(leftOp, negatedGlobal, mode));
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java
index 7023a94..f119b08 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/Cosmas2QueryProcessor.java
@@ -3,6 +3,7 @@
 import de.ids_mannheim.korap.query.parse.cosmas.c2psLexer;
 import de.ids_mannheim.korap.query.parse.cosmas.c2psParser;
 import de.ids_mannheim.korap.query.serialize.util.Antlr3DescriptiveErrorListener;
+import de.ids_mannheim.korap.query.serialize.util.CqlfObjectGenerator;
 import de.ids_mannheim.korap.query.serialize.util.ResourceMapper;
 import de.ids_mannheim.korap.query.serialize.util.StatusCodes;
 import de.ids_mannheim.korap.query.serialize.util.QueryException;
@@ -97,6 +98,7 @@
 	 * @throws QueryException
 	 */
 	public Cosmas2QueryProcessor(String query) throws QueryException {
+		CqlfObjectGenerator.setQueryProcessor(this);
 		this.query = query;
 		process(query);
 		log.info(">>> " + requestMap.get("query") + " <<<");
@@ -312,7 +314,7 @@
 		Tree begConditions = getFirstChildWithCat(node.getChild(optsChild), "TPBEG");
 		Tree endConditions = getFirstChildWithCat(node.getChild(optsChild), "TPEND");
 
-		LinkedHashMap<String, Object> submatchgroup = makeReference(128+classCounter);
+		LinkedHashMap<String, Object> submatchgroup = CqlfObjectGenerator.makeReference(128+classCounter);
 		ArrayList<Object> submatchOperands = new ArrayList<Object>();
 		submatchgroup.put("operands", submatchOperands);
 		putIntoSuperObject(submatchgroup);
@@ -338,14 +340,14 @@
 			if (conditionGroups.size()==1) {
 				submatchOperands.add(conditionGroup);
 			} else if (conditionCount < conditionGroups.size()) {
-				LinkedHashMap<String,Object> matchesGroup = makePosition(new String[]{"frames:matches"}, new String[0]);
+				LinkedHashMap<String,Object> matchesGroup = CqlfObjectGenerator.makePosition(new String[]{"frames:matches"}, new String[0]);
 				@SuppressWarnings("unchecked")
 				ArrayList<Object> matchesOperands = (ArrayList<Object>) matchesGroup.get("operands");
 				matchesOperands.add(conditionGroup);
 				// matches groups that are embedded at the second or lower level receive an additional
 				// focus to grep out only the query term to which the constraint applies
 				if (conditionCount > 1) {
-					LinkedHashMap<String,Object> focus = makeReference(128+classCounter-conditionGroups.size()+conditionCount-1);
+					LinkedHashMap<String,Object> focus = CqlfObjectGenerator.makeReference(128+classCounter-conditionGroups.size()+conditionCount-1);
 					ArrayList<Object> focusOperands = new ArrayList<Object>();
 					focus.put("operands", focusOperands);
 					focusOperands.add(matchesGroup);
@@ -363,8 +365,8 @@
 	private void processOPNHIT(Tree node) {
 		Integer[] classRef = new Integer[]{128+classCounter+1, 128+classCounter+2}; 
 		//            classRef.add(classCounter + 1);  // yes, do this twice (two classes)!
-		LinkedHashMap<String, Object> group = makeReference(128+classCounter);
-		LinkedHashMap<String, Object> classRefCheck = makeClassRefOp("classRefOp:inversion", classRef, classCounter+128);
+		LinkedHashMap<String, Object> group = CqlfObjectGenerator.makeReference(128+classCounter);
+		LinkedHashMap<String, Object> classRefCheck = CqlfObjectGenerator.makeClassRefOp("classRefOp:inversion", classRef, classCounter+128);
 		ArrayList<Object> operands = new ArrayList<Object>();
 		operands.add(classRefCheck);
 		group.put("operands", operands);
@@ -400,7 +402,7 @@
 		wrapOperandInClass(node,2,classCounter++);
 		wrapOperandInClass(node,1,classCounter++);
 		//            LinkedHashMap<String, Object> posgroup = makePosition(null);
-		LinkedHashMap<String, Object> posgroup = makeGroup("position");
+		LinkedHashMap<String, Object> posgroup = CqlfObjectGenerator.makeGroup("position");
 		LinkedHashMap<String, Object> positionOptions;
 		//            posgroup
 		if (nodeCat.equals("OPIN")) {
@@ -423,14 +425,14 @@
 		// Step II: wrap in reference and decide where to put
 		ArrayList<String> check = (ArrayList<String>) positionOptions.get("classRefCheck");
 		Integer[] classIn = new Integer[]{128+classCounter-2,128+classCounter-1};
-		LinkedHashMap<String, Object> classRefCheck = makeClassRefCheck(check, classIn, 128+classCounter);
+		LinkedHashMap<String, Object> classRefCheck = CqlfObjectGenerator.makeClassRefCheck(check, classIn, 128+classCounter);
 		((ArrayList<Object>) classRefCheck.get("operands")).add(posgroup);
 		LinkedHashMap<String, Object> focusGroup = null;
 		if ((boolean) positionOptions.get("matchall") == true) {
-			focusGroup = makeResetReference();
+			focusGroup = CqlfObjectGenerator.makeResetReference();
 			((ArrayList<Object>) focusGroup.get("operands")).add(classRefCheck);
 		} else { // match only first argument
-			focusGroup = wrapInReference(classRefCheck, 128+classCounter-1);
+			focusGroup = CqlfObjectGenerator.wrapInReference(classRefCheck, 128+classCounter-1);
 		}
 		putIntoSuperObject(focusGroup, 1);
 	}
@@ -442,7 +444,7 @@
 		Tree typ = prox_opts.getChild(0);
 		Tree dist_list = prox_opts.getChild(1);
 		// Step I: create group
-		LinkedHashMap<String, Object> group = makeGroup("sequence");
+		LinkedHashMap<String, Object> group = CqlfObjectGenerator.makeGroup("sequence");
 
 		ArrayList<Object> constraints = new ArrayList<Object>();
 		boolean exclusion = typ.getChild(0).toStringTree().equals("EXCL");
@@ -481,7 +483,7 @@
 			if (!meas.equals("w") && min == 0 ) {
 				processSpanDistance(meas,min,max);
 			}
-			LinkedHashMap<String, Object> distance = makeDistance(meas,min,max);
+			LinkedHashMap<String, Object> distance = CqlfObjectGenerator.makeDistance(meas,min,max);
 			if (exclusion) {
 				distance.put("exclude", true);
 			}
@@ -505,10 +507,10 @@
 		if (! (openNodeCats.get(1).equals("OPBEG") || openNodeCats.get(1).equals("OPEND") || inOPALL || openNodeCats.get(1).equals("OPNHIT"))) {
 			wrapOperandInClass(node,1,classCounter);
 			wrapOperandInClass(node,2,classCounter);
-			group = wrapInReference(group, 128+classCounter++);
+			group = CqlfObjectGenerator.wrapInReference(group, 128+classCounter++);
 		} else if (openNodeCats.get(1).equals("OPNHIT")) {
-			LinkedHashMap<String,Object> repetition = makeRepetition(min, max);
-			((ArrayList<Object>) repetition.get("operands")).add(makeToken());
+			LinkedHashMap<String,Object> repetition = CqlfObjectGenerator.makeRepetition(min, max);
+			((ArrayList<Object>) repetition.get("operands")).add(CqlfObjectGenerator.makeToken());
 			// TODO go on with this: put the repetition into a class and put it in between the operands
 			// -> what if there's several distance constraints. with different keys, like /w4,s0? 
 		}
@@ -516,10 +518,10 @@
 		LinkedHashMap<String,Object> sequence = null;
 		if (putIntoOverlapDisjunction) {
 			sequence = embeddedSequence;
-			group = makeGroup("or");
+			group = CqlfObjectGenerator.makeGroup("or");
 			ArrayList<Object> disjOperands = (ArrayList<Object>) group.get("operands");
 			String[] sharedClasses = new String[]{"intersects"};
-			LinkedHashMap<String,Object> overlapsGroup = makePosition(new String[0], sharedClasses);
+			LinkedHashMap<String,Object> overlapsGroup = CqlfObjectGenerator.makePosition(new String[0], sharedClasses);
 
 			ArrayList<Object> overlapsOperands = (ArrayList<Object>) overlapsGroup.get("operands");
 			// this ensures identity of the operands lists and thereby a distribution of the operands for both created objects 
@@ -528,7 +530,7 @@
 				invertedOperandsLists.push(overlapsOperands);
 			}
 			disjOperands.add(overlapsGroup);
-			disjOperands.add(wrapInReference(sequence, 0));
+			disjOperands.add(CqlfObjectGenerator.wrapInReference(sequence, 0));
 			// Step II: decide where to put
 			putIntoSuperObject(group, 0);
 			objectStack.push(sequence);
@@ -592,7 +594,7 @@
 	@SuppressWarnings("unchecked")
 	private void processOPELEM(Tree node) {
 		// Step I: create element
-		LinkedHashMap<String, Object> span = makeSpan();
+		LinkedHashMap<String, Object> span = CqlfObjectGenerator.makeSpan();
 		if (node.getChild(0).toStringTree().equals("EMPTY")) {
 
 		} else {
@@ -612,12 +614,12 @@
 				 * top-level group (in order to avoid a top-level group that only
 				 * contains a sub-group).
 				 */
-				LinkedHashMap<String, Object> termGroup = makeTermGroup("and");
+				LinkedHashMap<String, Object> termGroup = CqlfObjectGenerator.makeTermGroup("and");
 				ArrayList<Object> termGroupOperands = (ArrayList<Object>) termGroup.get("operands");
 				for (int i = elname; i < node.getChildCount(); i++) {
 					Tree attrNode = node.getChild(i);
 					if (attrNode.getChildCount() == 2) {
-						LinkedHashMap<String, Object> term = makeTerm();
+						LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
 						termGroupOperands.add(term);
 						String layer = attrNode.getChild(0).toStringTree();
 						String[] splitted = layer.split("/");
@@ -630,11 +632,11 @@
 						String match = getNodeCat(attrNode).equals("EQ") ? "eq" : "ne";
 						term.put("match", "match:" + match);
 					} else {
-						LinkedHashMap<String, Object> subTermGroup = makeTermGroup("and");
+						LinkedHashMap<String, Object> subTermGroup = CqlfObjectGenerator.makeTermGroup("and");
 						ArrayList<Object> subTermGroupOperands = (ArrayList<Object>) subTermGroup.get("operands");
 						int j;
 						for (j = 1; j < attrNode.getChildCount(); j++) {
-							LinkedHashMap<String, Object> term = makeTerm();
+							LinkedHashMap<String, Object> term = CqlfObjectGenerator.makeTerm();
 							String layer = attrNode.getChild(0).toStringTree();
 							String[] splitted = layer.split("/");
 							if (splitted.length > 1) {
@@ -671,7 +673,7 @@
 	private void processOPMORPH(Tree node) {
 		//Step I: get info
 		String[] morphterms = node.getChild(0).toStringTree().replace(" ", "").split("&");
-		LinkedHashMap<String, Object> token = makeToken();
+		LinkedHashMap<String, Object> token = CqlfObjectGenerator.makeToken();
 		ArrayList<Object> terms = new ArrayList<Object>();
 		LinkedHashMap<String, Object> fieldMap = null;
 		for (String morphterm : morphterms) {
@@ -706,7 +708,7 @@
 		if (morphterms.length == 1) {
 			token.put("wrap", fieldMap);
 		} else {
-			LinkedHashMap<String, Object> termGroup = makeTermGroup("and");
+			LinkedHashMap<String, Object> termGroup = CqlfObjectGenerator.makeTermGroup("and");
 			termGroup.put("operands", terms);
 			token.put("wrap", termGroup);
 		}
@@ -774,7 +776,7 @@
 	 * @param cls The class id.
 	 */
 	private void wrapOperandInClass(Tree node, int arg, int cls) {
-		LinkedHashMap<String,Object> clsGroup = makeSpanClass(cls);
+		LinkedHashMap<String,Object> clsGroup = CqlfObjectGenerator.makeSpanClass(cls);
 		wrapOperand(node,arg,clsGroup);
 	}
 
@@ -851,10 +853,10 @@
 			}
 		}
 		// Create the position group and add the span and the subquery as operands, possibly wrapped in spanRefs
-		LinkedHashMap<String, Object> positionGroup = makePosition(new String[]{position}, new String[0]);
+		LinkedHashMap<String, Object> positionGroup = CqlfObjectGenerator.makePosition(new String[]{position}, new String[0]);
 		if (negated) positionGroup.put("exclude", true);
 		ArrayList<Object> posOperands = new ArrayList<Object>();
-		LinkedHashMap<String, Object> classGroup = makeSpanClass(classCounter++);
+		LinkedHashMap<String, Object> classGroup = CqlfObjectGenerator.makeSpanClass(classCounter++);
 		classGroup.put("operands", distributedOperands);
 		positionGroup.put("operands", posOperands);
 		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
@@ -862,12 +864,12 @@
 		span.put("key", elem);
 		objectStack.push(classGroup);
 		if (hitSpanRef != null) {
-			LinkedHashMap<String, Object> spanRefAroundHit = makeSpanReference(hitSpanRef, "focus");
+			LinkedHashMap<String, Object> spanRefAroundHit = CqlfObjectGenerator.makeSpanReference(hitSpanRef, "focus");
 			((ArrayList<Object>) spanRefAroundHit.get("operands")).add(classGroup);
 			classGroup = spanRefAroundHit; //re-assign after wrapping classGroup in spanRef
 		}
 		if (elemSpanRef != null) {
-			LinkedHashMap<String, Object> spanRefAroundSpan = makeSpanReference(elemSpanRef, "focus");
+			LinkedHashMap<String, Object> spanRefAroundSpan = CqlfObjectGenerator.makeSpanReference(elemSpanRef, "focus");
 			((ArrayList<Object>) spanRefAroundSpan.get("operands")).add(span);
 			span = spanRefAroundSpan; //re-assign after wrapping span in spanRef
 		}
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java b/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java
index d509ccb..458e976 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/PoliqarpPlusQueryProcessor.java
@@ -3,6 +3,7 @@
 import de.ids_mannheim.korap.query.parse.poliqarpplus.PoliqarpPlusLexer;
 import de.ids_mannheim.korap.query.parse.poliqarpplus.PoliqarpPlusParser;
 import de.ids_mannheim.korap.query.serialize.util.Antlr4DescriptiveErrorListener;
+import de.ids_mannheim.korap.query.serialize.util.CqlfObjectGenerator;
 import de.ids_mannheim.korap.query.serialize.util.StatusCodes;
 import de.ids_mannheim.korap.query.serialize.util.QueryException;
 
@@ -34,6 +35,7 @@
 	 * @throws QueryException
 	 */
 	public PoliqarpPlusQueryProcessor(String query) throws QueryException {
+		CqlfObjectGenerator.setQueryProcessor(this);
 		process(query);
 		log.info(">>> " + requestMap.get("query") + " <<<");
 	}
@@ -171,9 +173,9 @@
 		// Cover possible quantification (i.e. repetition) of segment
 		ParseTree quantification = getFirstChildWithCat(node, "repetition");
 		if (quantification != null) {
-			LinkedHashMap<String,Object> quantGroup = makeGroup("repetition");
+			LinkedHashMap<String,Object> quantGroup = CqlfObjectGenerator.makeGroup("repetition");
 			Integer[] minmax = parseRepetition(quantification);
-			quantGroup.put("boundary", makeBoundary(minmax[0], minmax[1]));
+			quantGroup.put("boundary", CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
 			if (minmax[0] != null) quantGroup.put("min", minmax[0]);
 			if (minmax[1] != null) quantGroup.put("max", minmax[1]);
 			addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-07-24: 'min' and 'max' to be " +
@@ -185,12 +187,12 @@
 	}
 
 	private void processSequence(ParseTree node) {
-		LinkedHashMap<String,Object> sequence = makeGroup("sequence");
+		LinkedHashMap<String,Object> sequence = CqlfObjectGenerator.makeGroup("sequence");
 		ParseTree distanceNode = getFirstChildWithCat(node, "distance");
 
 		if (distanceNode!=null) {
 			Integer[] minmax = parseDistance(distanceNode);
-			LinkedHashMap<String,Object> distance = makeDistance("w", minmax[0], minmax[1]);
+			LinkedHashMap<String,Object> distance = CqlfObjectGenerator.makeDistance("w", minmax[0], minmax[1]);
 			sequence.put("inOrder", true);
 			ArrayList<Object> distances = new ArrayList<Object>();
 			distances.add(distance);
@@ -211,9 +213,9 @@
 		Integer[] minmax = parseEmptySegments(node);
 		// object will be either a repetition group or a single empty token
 		LinkedHashMap<String,Object> object; 
-		LinkedHashMap<String,Object> emptyToken = makeToken();
+		LinkedHashMap<String,Object> emptyToken = CqlfObjectGenerator.makeToken();
 		if (minmax[0] != 1 || minmax[1] == null || minmax[1] != 1) {
-			object = makeRepetition(minmax[0], minmax[1]);
+			object = CqlfObjectGenerator.makeRepetition(minmax[0], minmax[1]);
 			((ArrayList<Object>) object.get("operands")).add(emptyToken);
 		} else {
 			object = emptyToken;
@@ -228,14 +230,14 @@
 		if (hasChild(node, "spanclass_id")) {
 			classId = Integer.parseInt(node.getChild(1).getChild(0).toStringTree(parser));
 		}
-		LinkedHashMap<String,Object> classGroup = makeSpanClass(classId, false);
+		LinkedHashMap<String,Object> classGroup = CqlfObjectGenerator.makeSpanClass(classId, false);
 		putIntoSuperObject(classGroup);
 		objectStack.push(classGroup);
 		stackedObjects++;
 	}
 
 	private void processToken(ParseTree node) {
-		LinkedHashMap<String,Object> token = makeToken();
+		LinkedHashMap<String,Object> token = CqlfObjectGenerator.makeToken();
 		// handle negation
 		List<ParseTree> negations = getChildrenWithCat(node, "!");
 		boolean negated = false;
@@ -243,7 +245,7 @@
 		if (negations.size() % 2 == 1) negated = true;
 		if (getNodeCat(node.getChild(0)).equals("key")) {
 			// no 'term' child, but direct key specification: process here
-			LinkedHashMap<String,Object> term = makeTerm();
+			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 
 			String key = node.getChild(0).getText();
 			if (getNodeCat(node.getChild(0).getChild(0)).equals("regex")) {
@@ -284,7 +286,7 @@
 
 	@SuppressWarnings("unchecked")
 	private void processAlignment(ParseTree node) {
-		LinkedHashMap<String,Object> alignClass = makeSpanClass(++classCounter,false);
+		LinkedHashMap<String,Object> alignClass = CqlfObjectGenerator.makeSpanClass(++classCounter,false);
 		LinkedHashMap<String,Object> metaMap = (LinkedHashMap<String, Object>) requestMap.get("meta");
 		if (metaMap.containsKey("alignment")) {
 			ArrayList<Integer> alignedClasses = new ArrayList<Integer>();
@@ -308,7 +310,7 @@
 		List<ParseTree> negations = getChildrenWithCat(node, "!");
 		boolean negated = false;
 		if (negations.size() % 2 == 1) negated = true;
-		LinkedHashMap<String,Object> span = makeSpan();
+		LinkedHashMap<String,Object> span = CqlfObjectGenerator.makeSpan();
 		ParseTree keyNode = getFirstChildWithCat(node, "key");
 		ParseTree layerNode = getFirstChildWithCat(node, "layer");
 		ParseTree foundryNode = getFirstChildWithCat(node, "foundry");
@@ -343,7 +345,7 @@
 	}
 
 	private void processDisjunction(ParseTree node) {
-		LinkedHashMap<String,Object> disjunction = makeGroup("or");
+		LinkedHashMap<String,Object> disjunction = CqlfObjectGenerator.makeGroup("or");
 		putIntoSuperObject(disjunction);
 		objectStack.push(disjunction);
 		stackedObjects++;
@@ -357,8 +359,8 @@
 	}
 
 	private void processRelation(ParseTree node) {
-		LinkedHashMap<String, Object> relationGroup = makeGroup("relation");
-		LinkedHashMap<String, Object> relation = makeRelation();
+		LinkedHashMap<String, Object> relationGroup = CqlfObjectGenerator.makeGroup("relation");
+		LinkedHashMap<String, Object> relation = CqlfObjectGenerator.makeRelation();
 		relationGroup.put("relation", relation);
 		if (node.getChild(0).getText().equals("dominates")) {
 			relation.put("layer", "c");
@@ -375,7 +377,7 @@
 		}
 		if (repetition != null) {
 			Integer[] minmax =  parseRepetition(repetition);
-			relation.put("boundary", makeBoundary(minmax[0], minmax[1]));
+			relation.put("boundary", CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
 		}
 		putIntoSuperObject(relationGroup);
 		objectStack.push(relationGroup);
@@ -402,7 +404,7 @@
 				classId = 127;
 			}
 		}
-		LinkedHashMap<String, Object> classGroup = makeSpanClass(classId, false);
+		LinkedHashMap<String, Object> classGroup = CqlfObjectGenerator.makeSpanClass(classId, false);
 		putIntoSuperObject(classGroup);
 		objectStack.push(classGroup);
 		stackedObjects++;
@@ -439,7 +441,7 @@
 		} else {
 			classRefs.add(1);
 		}
-		LinkedHashMap<String, Object> referenceGroup = makeReference(classRefs);
+		LinkedHashMap<String, Object> referenceGroup = CqlfObjectGenerator.makeReference(classRefs);
 
 		String type = node.getChild(0).toStringTree(parser);
 		// Default is focus(), if deviating catch here
@@ -463,7 +465,7 @@
 	}
 
 	private void processSubmatch(ParseTree node) {
-		LinkedHashMap<String,Object> submatch = makeReference(null);
+		LinkedHashMap<String,Object> submatch = CqlfObjectGenerator.makeReference(null);
 		submatch.put("operands", new ArrayList<Object>());
 		ParseTree startpos = getFirstChildWithCat(node,"startpos");
 		ParseTree length = getFirstChildWithCat(node,"length");
@@ -574,7 +576,7 @@
 			classRefCheck = new String[]{"classRefCheck:intersects"};
 			break;
 		}
-		return makePosition(frames,classRefCheck);
+		return CqlfObjectGenerator.makePosition(frames,classRefCheck);
 	}
 
 
@@ -595,7 +597,7 @@
 		String nodeCat = getNodeCat(node);
 		if (nodeCat.equals("term")) {
 			String key = null;
-			LinkedHashMap<String,Object> term = makeTerm();
+			LinkedHashMap<String,Object> term = CqlfObjectGenerator.makeTerm();
 			// handle negation
 			boolean negated = negatedGlobal;
 			boolean isRegex = false;
@@ -663,7 +665,7 @@
 			// establish boolean relation
 			ParseTree boolOp = getFirstChildWithCat(node, "boolOp"); 
 			String operator = boolOp.getText().equals("&") ? "and" : "or";
-			termGroup = makeTermGroup(operator);
+			termGroup = CqlfObjectGenerator.makeTermGroup(operator);
 			ArrayList<Object> operands = (ArrayList<Object>) termGroup.get("operands");
 			// recursion with left/right operands
 			operands.add(parseTermOrTermGroup(leftOp, negatedGlobal, mode));
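For illustration, a minimal sketch of the delegation pattern this file now follows: the processor registers itself once in its constructor (see above) and then obtains CQLF objects from the static factory methods of CqlfObjectGenerator instead of from inherited helpers. The method below is hypothetical; the generator calls and the surrounding members (parseRepetition, getFirstChildWithCat, putIntoSuperObject, objectStack, stackedObjects) are the ones visible in the hunks above.

    // Hypothetical tree-walking method mirroring the pattern of the changes above.
    private void processQuantifiedSegment(ParseTree node) {
        // build a repetition group with an explicit boundary object
        LinkedHashMap<String, Object> quantGroup = CqlfObjectGenerator.makeGroup("repetition");
        Integer[] minmax = parseRepetition(getFirstChildWithCat(node, "repetition"));
        quantGroup.put("boundary", CqlfObjectGenerator.makeBoundary(minmax[0], minmax[1]));
        // attach to the enclosing object and keep it on the stack for nested operands
        putIntoSuperObject(quantGroup);
        objectStack.push(quantGroup);
        stackedObjects++;
    }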
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java b/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java
new file mode 100644
index 0000000..b03959d
--- /dev/null
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/util/CqlfObjectGenerator.java
@@ -0,0 +1,276 @@
+package de.ids_mannheim.korap.query.serialize.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+
+import de.ids_mannheim.korap.query.serialize.AbstractQueryProcessor;
+
+public class CqlfObjectGenerator {
+
+	protected static final Integer MAXIMUM_DISTANCE = 100; 
+	private static AbstractQueryProcessor qp;
+	
+	public static void setQueryProcessor(AbstractQueryProcessor qp) {
+		CqlfObjectGenerator.qp = qp;
+	}
+	
+	public static LinkedHashMap<String, Object> makeSpan() {
+		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
+		span.put("@type", "korap:span");
+		return span;
+	}
+	
+	public static LinkedHashMap<String, Object> makeSpan(String key) {
+		LinkedHashMap<String, Object> span = new LinkedHashMap<String, Object>();
+		span.put("@type", "korap:span");
+		span.put("key", key);
+		return span;
+	}
+	
+	public static LinkedHashMap<String, Object> makeTerm() {
+		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+		term.put("@type", "korap:term");
+		return term;
+	}
+	
+	public static LinkedHashMap<String, Object> makeTermGroup(String relation) {
+		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+		term.put("@type", "korap:termGroup");
+		term.put("relation", "relation:"+relation);
+		term.put("operands", new ArrayList<Object>());
+		return term;
+	}
+	
+	public static LinkedHashMap<String, Object> makeDoc() {
+		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+		term.put("@type", "korap:doc");
+		return term;
+	}
+	
+	public static LinkedHashMap<String, Object> makeDocGroup(String relation) {
+		LinkedHashMap<String, Object> term = new LinkedHashMap<String, Object>();
+		term.put("@type", "korap:docGroup");
+		term.put("operation", "operation:"+relation);
+		term.put("operands", new ArrayList<Object>());
+		return term;
+	}
+	
+	public static LinkedHashMap<String, Object> makeToken() {
+		LinkedHashMap<String, Object> token = new LinkedHashMap<String, Object>();
+		token.put("@type", "korap:token");
+		return token;
+	}
+	
+	public static LinkedHashMap<String, Object> makeGroup(String operation) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:group");
+		group.put("operation", "operation:"+operation);
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeRepetition(Integer min, Integer max) {
+		LinkedHashMap<String, Object> group = makeGroup("repetition");
+		group.put("boundary", makeBoundary(min, max));
+		group.put("min", min);
+		if (max != null) {
+			group.put("max", max);
+		}
+		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-07-24: 'min' and 'max' to be supported until 3 months from deprecation date.");
+		return group;
+	}
+	
+	@Deprecated
+	public static LinkedHashMap<String, Object> makePosition(String frame) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:group");
+		group.put("operation", "operation:position");
+		group.put("frame", "frame:"+frame);
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makePosition(String[] allowedFrames, String[] classRefCheck) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:group");
+		group.put("operation", "operation:position");
+		group.put("frames", Arrays.asList(allowedFrames));
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeSpanClass(int classCount) {
+		return makeSpanClass(classCount, true);
+	}
+	
+	public static LinkedHashMap<String, Object> makeSpanClass(int classCount, boolean setBySystem) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:group");
+		group.put("operation", "operation:class");
+		if (setBySystem) {
+			group.put("class", 128+classCount);
+			group.put("classOut", 128+classCount);
+			qp.addMessage("A class has been introduced into the backend representation of " +
+					"your query for later reference to a part of the query. The class id is "+(128+classCount));
+		} else {
+			group.put("class", classCount);
+			group.put("classOut", classCount);
+		}
+		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. " +
+				"Classes are now defined using the 'classOut' attribute.");
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeClassRefCheck(ArrayList<String> check, Integer[] classIn, int classOut) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:group");
+		group.put("operation", "operation:class");
+		group.put("classRefCheck", check);
+		group.put("classIn", Arrays.asList(classIn));
+		group.put("classOut", classOut);
+		group.put("class", classOut);
+		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-10-07: 'class' only to be supported until 3 months from deprecation date. " +
+				"Classes are now defined using the 'classOut' attribute.");
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeClassRefOp(String operation, Integer[] classIn, int classOut) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:group");
+		group.put("operation", "operation:class");
+		group.put("classRefOp", operation);
+		group.put("classIn", Arrays.asList(classIn));
+		group.put("classOut", classOut);
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	@Deprecated
+	public static LinkedHashMap<String, Object> makeTreeRelation(String reltype) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:treeRelation");
+		if (reltype != null) group.put("reltype", reltype);
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeRelation() {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:relation");
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeBoundary(Integer min, Integer max) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:boundary");
+		group.put("min", min);
+		if (max != null) {
+			group.put("max", max);
+		}
+		return group;
+	}
+
+	public static LinkedHashMap<String, Object> makeDistance(String key, Integer min, Integer max) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		if (key.equals("w")) {
+			group.put("@type", "korap:distance");
+		} else {
+			group.put("@type", "cosmas:distance");
+		}
+		group.put("key", key);
+		group.put("boundary", makeBoundary(min, max));
+		group.put("min", min);
+		if (max != null) {
+			group.put("max", max);
+		}
+		qp.addMessage(StatusCodes.DEPRECATED_QUERY_ELEMENT, "Deprecated 2014-07-24: 'min' and 'max' to be supported until 3 months from deprecation date.");
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeReference(ArrayList<Integer> classRefs, String operation) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:reference");
+		group.put("operation", "operation:"+operation);
+		if (classRefs!= null && !classRefs.isEmpty()) {
+			group.put("classRef", classRefs);
+		}
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeReference(ArrayList<Integer> classRefs) {
+		return makeReference(classRefs, "focus");
+	}
+	
+	public static LinkedHashMap<String, Object> makeReference(int classRef, String operation, boolean setBySystem) {
+		ArrayList<Integer> classRefs = new ArrayList<Integer>();
+		if (setBySystem) classRef = classRef+128;
+		classRefs.add(classRef);
+		return makeReference(classRefs, operation);
+	}
+	
+	public static LinkedHashMap<String, Object> makeReference(int classRef, boolean setBySystem) {
+		ArrayList<Integer> classRefs = new ArrayList<Integer>();
+		if (setBySystem) classRef = classRef+128;
+		classRefs.add(classRef);
+		return makeReference(classRefs, "focus");
+	}
+	
+	public static LinkedHashMap<String, Object> makeReference(int classRef) {
+		return makeReference(classRef, false);
+	}
+	
+	public static LinkedHashMap<String, Object> makeResetReference() {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:reference");
+		group.put("operation", "operation:focus");
+		group.put("reset", true);
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static LinkedHashMap<String, Object> makeSpanReference(Integer[] spanRef, String operation) {
+		LinkedHashMap<String, Object> group = new LinkedHashMap<String, Object>();
+		group.put("@type", "korap:reference");
+		group.put("operation", "operation:"+operation);
+		group.put("spanRef", Arrays.asList(spanRef));
+		group.put("operands", new ArrayList<Object>());
+		return group;
+	}
+	
+	public static void addOperandsToGroup(LinkedHashMap<String, Object> group) {
+		ArrayList<Object> operands = new ArrayList<Object>();
+		group.put("operands", operands);
+	}
+	
+	public static LinkedHashMap<String, Object> wrapInReference(LinkedHashMap<String, Object> group, Integer classId) {
+		LinkedHashMap<String, Object> refGroup = makeReference(classId);
+		ArrayList<Object> operands = new ArrayList<Object>();
+		operands.add(group);
+		refGroup.put("operands", operands);
+		return refGroup;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static LinkedHashMap<String, Object> wrapInClass(LinkedHashMap<String, Object> group, Integer classId) {
+		LinkedHashMap<String, Object> classGroup = makeSpanClass(classId, true);
+		((ArrayList<Object>) classGroup.get("operands")).add(group);
+		return classGroup;
+	}
+	
+	/**
+	 * Ensures that a distance or quantification value does not exceed the allowed maximum value. 
+	 * @param number The distance or repetition value to check against the allowed maximum.
+	 * @return The input number if it is below the allowed maximum value, else the maximum value. 
+	 */
+	public static int cropToMaxValue(int number) {
+		if (number > MAXIMUM_DISTANCE) {
+			number = MAXIMUM_DISTANCE; 
+			String warning = String.format("You specified a distance between two segments that is greater than " +
+					"the allowed max value of %d. Your query will be re-interpreted using a distance of %d.", MAXIMUM_DISTANCE, MAXIMUM_DISTANCE);
+			qp.addWarning(warning);
+		}
+		return number;
+	}
+}
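
For illustration, a minimal sketch of driving the new util class directly, assuming the hypothetical query string and class name below; the generator calls themselves are the ones defined in this file. A query processor has to be registered first (here implicitly, because the PoliqarpPlusQueryProcessor constructor calls setQueryProcessor on itself), since several factory methods report deprecation messages and warnings through it.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;

    import de.ids_mannheim.korap.query.serialize.PoliqarpPlusQueryProcessor;
    import de.ids_mannheim.korap.query.serialize.util.CqlfObjectGenerator;

    // Hypothetical driver class exercising the static factory methods defined above.
    public class CqlfObjectGeneratorExample {
        @SuppressWarnings("unchecked")
        public static void main(String[] args) throws Exception {
            // constructing a processor registers it with the generator (see its constructor)
            new PoliqarpPlusQueryProcessor("[base=Baum]");

            // an empty token repeated 2 to 5 times...
            LinkedHashMap<String, Object> rep = CqlfObjectGenerator.makeRepetition(2, 5);
            ((ArrayList<Object>) rep.get("operands")).add(CqlfObjectGenerator.makeToken());

            // ...wrapped in a system-defined class and focused on via a reference
            LinkedHashMap<String, Object> classed = CqlfObjectGenerator.wrapInClass(rep, 1);
            LinkedHashMap<String, Object> focused = CqlfObjectGenerator.wrapInReference(classed, 129);

            // distances beyond MAXIMUM_DISTANCE are cropped and reported as a warning
            int distance = CqlfObjectGenerator.cropToMaxValue(500); // yields 100

            System.out.println(focused);
            System.out.println(distance);
        }
    }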