collection query and meta serialization: rename CollectionQuery and MetaQuery to CollectionQueryBuilder and MetaQueryBuilder;
refactoring of the REST service;
Piwik filter for REST;
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQuery.java b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryBuilder.java
similarity index 93%
rename from src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQuery.java
rename to src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryBuilder.java
index c758f1d..c48f365 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQuery.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionQueryBuilder.java
@@ -17,7 +17,7 @@
* @author hanl
* @date 06/12/2013
*/
-public class CollectionQuery {
+public class CollectionQueryBuilder {
private CollectionTypes types;
private List<Map> rq;
@@ -27,14 +27,14 @@
private Relation simpleExtendRel = Relation.AND;
- public CollectionQuery() {
+ public CollectionQueryBuilder() {
this.rq = new ArrayList<>();
this.mfilter = ArrayListMultimap.create();
this.mextension = ArrayListMultimap.create();
this.types = new CollectionTypes();
}
- public CollectionQuery addResource(String query) {
+ public CollectionQueryBuilder addResource(String query) {
try {
List v = JsonUtils.read(query, LinkedList.class);
this.rq.addAll(v);
@@ -44,40 +44,40 @@
return this;
}
- public CollectionQuery addResources(List<String> queries) {
+ public CollectionQueryBuilder addResources(List<String> queries) {
for (String query : queries)
addResource(query);
return this;
}
- public CollectionQuery addMetaFilter(String key, String value) {
+ public CollectionQueryBuilder addMetaFilter(String key, String value) {
this.mfilter.put(key, value);
return this;
}
- public CollectionQuery addMetaFilterQuery(String queries) {
+ public CollectionQueryBuilder addMetaFilterQuery(String queries) {
this.mfilter.putAll(resRel(queries));
return this;
}
- public CollectionQuery addMetaExtend(String key, String value) {
+ public CollectionQueryBuilder addMetaExtend(String key, String value) {
this.mextension.put(key, value);
return this;
}
- public CollectionQuery setFilterAttributeRelation(Relation rel) {
+ public CollectionQueryBuilder setFilterAttributeRelation(Relation rel) {
simpleFilterRel = rel;
return this;
}
- public CollectionQuery setExtendAttributeRelation(Relation rel) {
+ public CollectionQueryBuilder setExtendAttributeRelation(Relation rel) {
simpleExtendRel = rel;
return this;
}
- public CollectionQuery addMetaExtendQuery(String queries) {
+ public CollectionQueryBuilder addMetaExtendQuery(String queries) {
this.mextension.putAll(resRel(queries));
return this;
}
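
Note: the renamed CollectionQueryBuilder keeps the fluent API of the old CollectionQuery. A minimal usage sketch based on the methods visible in this hunk (field names and values are illustrative only):

    import de.ids_mannheim.korap.query.serialize.CollectionQueryBuilder;
    import de.ids_mannheim.korap.resource.Relation;

    CollectionQueryBuilder collection = new CollectionQueryBuilder()
            .addMetaFilter("corpusID", "WPD")                                // single key/value filter
            .addMetaFilterQuery("textClass:politik AND pubDate:<2012-04-01") // filter query string
            .setFilterAttributeRelation(Relation.AND)                        // relation between filter attributes
            .addMetaExtend("corpusID", "A01");                               // extend the virtual collection
    String serialized = collection.buildString();                            // cf. MetaQuerySerializationTest below
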
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionTypes.java b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionTypes.java
index 799acb1..414179b 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionTypes.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/CollectionTypes.java
@@ -1,12 +1,9 @@
package de.ids_mannheim.korap.query.serialize;
-import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.joda.time.DateTime;
import java.io.IOException;
import java.util.LinkedHashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -53,7 +50,7 @@
}
public Map createTerm(String field, String value) {
- return createTerm(field, value, null);
+ return createTerm(field, value, null);
}
public Map createResourceFilter(String resource, Map value) {
@@ -91,7 +88,6 @@
}
-
public Map mapify(String s) throws IOException {
return mapper.readValue(s, Map.class);
}
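
The helpers touched in this hunk are thin wrappers around Jackson; a sketch of typical calls (values are made up):

    CollectionTypes types = new CollectionTypes();
    Map term = types.createTerm("corpusID", "WPD");           // delegates to createTerm(field, value, null)
    Map filter = types.createResourceFilter("corpus", term);  // wraps the term in a resource filter
    Map parsed = types.mapify("{\"@type\":\"korap:term\"}");  // ObjectMapper.readValue(s, Map.class), throws IOException
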
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/ExpertFilter.java b/src/main/java/de/ids_mannheim/korap/query/serialize/ExpertFilter.java
index 3526657..5dcf845 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/ExpertFilter.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/ExpertFilter.java
@@ -1,139 +1,129 @@
package de.ids_mannheim.korap.query.serialize;
-import de.ids_mannheim.korap.query.serialize.util.CollectionQueryParser;
import de.ids_mannheim.korap.query.serialize.util.CollectionQueryLexer;
+import de.ids_mannheim.korap.query.serialize.util.CollectionQueryParser;
import de.ids_mannheim.korap.util.QueryException;
+import org.antlr.v4.runtime.*;
+import org.antlr.v4.runtime.tree.ParseTree;
import java.lang.reflect.Method;
import java.util.*;
-import org.antlr.v4.runtime.ANTLRInputStream;
-import org.antlr.v4.runtime.BailErrorStrategy;
-import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.antlr.v4.runtime.Lexer;
-import org.antlr.v4.runtime.Parser;
-import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.tree.ParseTree;
-
/**
* @author hanl
* @date 06/12/2013
*/
public class ExpertFilter extends Antlr4AbstractSyntaxTree {
- private Parser parser;
- private boolean verbose = false;
- private List<ParseTree> visited = new ArrayList<ParseTree>();
- /**
- * Top-level map representing the whole request.
- */
- LinkedHashMap<String,Object> requestMap = new LinkedHashMap<String,Object>();
- /**
- * Keeps track of active object.
- */
- LinkedList<LinkedHashMap<String,Object>> objectStack = new LinkedList<LinkedHashMap<String,Object>>();
- /**
- * Keeps track of open node categories
- */
- LinkedList<String> openNodeCats = new LinkedList<String>();
- /**
- * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)}
- */
- LinkedList<Integer> objectsToPop = new LinkedList<Integer>();
- Integer stackedObjects = 0;
+ private Parser parser;
+ private boolean verbose = false;
+ private List<ParseTree> visited = new ArrayList<ParseTree>();
+ /**
+ * Top-level map representing the whole request.
+ */
+ LinkedHashMap<String, Object> requestMap = new LinkedHashMap<String, Object>();
+ /**
+ * Keeps track of active object.
+ */
+ LinkedList<LinkedHashMap<String, Object>> objectStack = new LinkedList<LinkedHashMap<String, Object>>();
+ /**
+ * Keeps track of open node categories
+ */
+ LinkedList<String> openNodeCats = new LinkedList<String>();
+ /**
+ * Keeps track of how many objects there are to pop after every recursion of {@link #processNode(ParseTree)}
+ */
+ LinkedList<Integer> objectsToPop = new LinkedList<Integer>();
+ Integer stackedObjects = 0;
public ExpertFilter() {
}
-
- @Override
- public void process(String query) throws QueryException {
- ParseTree tree = parseCollectionQuery(query);
- if (this.parser != null) {
- super.parser = this.parser;
- } else {
- throw new NullPointerException("Parser has not been instantiated!");
- }
-
- System.out.println("Processing collection query");
- if (verbose) System.out.println(tree.toStringTree(parser));
- processNode(tree);
- }
- private void processNode(ParseTree node) {
- // Top-down processing
- String nodeCat = getNodeCat(node);
- openNodeCats.push(nodeCat);
-
- stackedObjects = 0;
-
- if (verbose) {
- System.err.println(" "+objectStack);
- System.out.println(openNodeCats);
- }
+ @Override
+ public void process(String query) throws QueryException {
+ ParseTree tree = parseCollectionQuery(query);
+ if (this.parser != null) {
+ super.parser = this.parser;
+ } else {
+ throw new NullPointerException("Parser has not been instantiated!");
+ }
+
+ System.out.println("Processing collection query");
+ if (verbose) System.out.println(tree.toStringTree(parser));
+ processNode(tree);
+ }
+
+ private void processNode(ParseTree node) {
+ // Top-down processing
+ String nodeCat = getNodeCat(node);
+ openNodeCats.push(nodeCat);
+
+ stackedObjects = 0;
+
+ if (verbose) {
+ System.err.println(" " + objectStack);
+ System.out.println(openNodeCats);
+ }
/*
- ****************************************************************
+ ****************************************************************
****************************************************************
* Processing individual node categories *
****************************************************************
****************************************************************
*/
-
- if (nodeCat.equals("andGroup")) {
- LinkedHashMap<String, Object> exprGroup = makeTermGroup("and");
- objectStack.push(exprGroup);
- stackedObjects++;
- putIntoSuperObject(exprGroup,1);
- }
-
- if (nodeCat.equals("orGroup")) {
- LinkedHashMap<String, Object> exprGroup = makeTermGroup("or");
- objectStack.push(exprGroup);
- stackedObjects++;
- putIntoSuperObject(exprGroup,1);
- }
-
- if (nodeCat.equals("expr")) {
- ParseTree fieldNode = getFirstChildWithCat(node, "field");
- String field = fieldNode.getChild(0).toStringTree(parser);
- List<ParseTree> operatorNodes = getChildrenWithCat(node, "operator");
- List<ParseTree> valueNodes = getChildrenWithCat(node, "value");
-
- if (valueNodes.size()==1) {
- LinkedHashMap<String, Object> term = makeTerm();
- term.put("attribute", field);
- term.put("key", valueNodes.get(0).getChild(0).toStringTree(parser));
- String match = operatorNodes.get(0).getChild(0).toStringTree(parser);
- term.put("match", "match:"+interpretMatch(match));
- putIntoSuperObject(term);
- } else { // (valueNodes.size()==2)
- LinkedHashMap<String, Object> termGroup = makeTermGroup("and");
- ArrayList<Object> termGroupOperands = (ArrayList<Object>) termGroup.get("operands");
-
- LinkedHashMap<String, Object> term1 = makeTerm();
- term1.put("attribute", field);
- term1.put("key", valueNodes.get(0).getChild(0).toStringTree(parser));
- String match1 = operatorNodes.get(0).getChild(0).toStringTree(parser);
- term1.put("match", "match:"+invertInequation(interpretMatch(match1)));
- termGroupOperands.add(term1);
-
- LinkedHashMap<String, Object> term2 = makeTerm();
- term2.put("attribute", field);
- term2.put("key", valueNodes.get(1).getChild(0).toStringTree(parser));
- String match2 = operatorNodes.get(1).getChild(0).toStringTree(parser);
- term2.put("match", "match:"+interpretMatch(match2));
- termGroupOperands.add(term2);
-
- putIntoSuperObject(termGroup);
- }
-
- }
-
-
-
- objectsToPop.push(stackedObjects);
+
+ if (nodeCat.equals("andGroup")) {
+ LinkedHashMap<String, Object> exprGroup = makeTermGroup("and");
+ objectStack.push(exprGroup);
+ stackedObjects++;
+ putIntoSuperObject(exprGroup, 1);
+ }
+
+ if (nodeCat.equals("orGroup")) {
+ LinkedHashMap<String, Object> exprGroup = makeTermGroup("or");
+ objectStack.push(exprGroup);
+ stackedObjects++;
+ putIntoSuperObject(exprGroup, 1);
+ }
+
+ if (nodeCat.equals("expr")) {
+ ParseTree fieldNode = getFirstChildWithCat(node, "field");
+ String field = fieldNode.getChild(0).toStringTree(parser);
+ List<ParseTree> operatorNodes = getChildrenWithCat(node, "operator");
+ List<ParseTree> valueNodes = getChildrenWithCat(node, "value");
+
+ if (valueNodes.size() == 1) {
+ LinkedHashMap<String, Object> term = makeTerm();
+ term.put("attribute", field);
+ term.put("key", valueNodes.get(0).getChild(0).toStringTree(parser));
+ String match = operatorNodes.get(0).getChild(0).toStringTree(parser);
+ term.put("match", "match:" + interpretMatch(match));
+ putIntoSuperObject(term);
+ } else { // (valueNodes.size()==2)
+ LinkedHashMap<String, Object> termGroup = makeTermGroup("and");
+ ArrayList<Object> termGroupOperands = (ArrayList<Object>) termGroup.get("operands");
+
+ LinkedHashMap<String, Object> term1 = makeTerm();
+ term1.put("attribute", field);
+ term1.put("key", valueNodes.get(0).getChild(0).toStringTree(parser));
+ String match1 = operatorNodes.get(0).getChild(0).toStringTree(parser);
+ term1.put("match", "match:" + invertInequation(interpretMatch(match1)));
+ termGroupOperands.add(term1);
+
+ LinkedHashMap<String, Object> term2 = makeTerm();
+ term2.put("attribute", field);
+ term2.put("key", valueNodes.get(1).getChild(0).toStringTree(parser));
+ String match2 = operatorNodes.get(1).getChild(0).toStringTree(parser);
+ term2.put("match", "match:" + interpretMatch(match2));
+ termGroupOperands.add(term2);
+
+ putIntoSuperObject(termGroup);
+ }
+
+ }
+ objectsToPop.push(stackedObjects);
/*
****************************************************************
@@ -142,126 +132,137 @@
****************************************************************
****************************************************************
*/
- for (int i=0; i<node.getChildCount(); i++) {
- ParseTree child = node.getChild(i);
- processNode(child);
- }
+ for (int i = 0; i < node.getChildCount(); i++) {
+ ParseTree child = node.getChild(i);
+ processNode(child);
+ }
/*
**************************************************************
* Stuff that happens after processing the children of a node *
**************************************************************
*/
- if (!objectsToPop.isEmpty()) {
- for (int i=0; i<objectsToPop.pop(); i++) {
- objectStack.pop();
- }
- }
- openNodeCats.pop();
-
-
- }
+ if (!objectsToPop.isEmpty()) {
+ for (int i = 0; i < objectsToPop.pop(); i++) {
+ objectStack.pop();
+ }
+ }
+ openNodeCats.pop();
-
-
- private String interpretMatch(String match) {
- String out = null;
- if (match.equals("<")) {
- out = "lt";
- } else if (match.equals(">")) {
- out = "gt";
- } else if (match.equals("<=")) {
- out = "leq";
- } else if (match.equals(">=")) {
- out = "geq";
- } else if (match.equals("=")) {
- out = "eq";
- } else if (match.equals("!=")) {
- out = "ne";
- }
- return out;
- }
- private String invertInequation(String op) {
- String inv = null;
- if (op.equals("lt")) {
- inv = "gt";
- } else if (op.equals("leq")) {
- inv = "geq";
- } else if (op.equals("gt")) {
- inv = "lt";
- } else if (op.equals("geq")) {
- inv = "leq";
- }
- return inv;
- }
-
- private void putIntoSuperObject(LinkedHashMap<String, Object> object) {
- putIntoSuperObject(object, 0);
- }
-
- @SuppressWarnings({ "unchecked" })
- private void putIntoSuperObject(LinkedHashMap<String, Object> object, int objStackPosition) {
- if (objectStack.size()>objStackPosition) {
- ArrayList<Object> topObjectOperands = (ArrayList<Object>) objectStack.get(objStackPosition).get("operands");
- topObjectOperands.add(0, object);
-
- } else {
- requestMap.put("query", object);
- }
- }
-
- private ParserRuleContext parseCollectionQuery (String p) throws QueryException {
- Lexer collectionQueryLexer = new CollectionQueryLexer((CharStream)null);
- ParserRuleContext tree = null;
- // Like p. 111
- try {
-
- // Tokenize input data
- ANTLRInputStream input = new ANTLRInputStream(p);
- collectionQueryLexer.setInputStream(input);
- CommonTokenStream tokens = new CommonTokenStream(collectionQueryLexer);
- parser = new CollectionQueryParser(tokens);
-
- // Don't throw out erroneous stuff
- parser.setErrorHandler(new BailErrorStrategy());
- parser.removeErrorListeners();
- // Get starting rule from parser
- Method startRule = CollectionQueryParser.class.getMethod("start");
- tree = (ParserRuleContext) startRule.invoke(parser, (Object[])null);
-
- }
- // Some things went wrong ...
- catch (Exception e) {
- System.err.println( e.getMessage() );
- }
- if (tree == null) {
- throw new QueryException("Could not parse expert filter query. Make sure it is correct syntax.");
- }
- // Return the generated tree
- return tree;
- }
-
- public static void main(String[] args) {
- String query = "foo=bar&c=d";
- query = "(1990<year<2010&genre=Sport)|textClass=politk";
- query = "(textClass=wissenschaft & textClass=politik) | textClass=ausland";
- query = "1990<year<2010 & genre=Sport";
- ExpertFilter filter = new ExpertFilter();
-// filter.verbose = true;
- try {
- filter.process(query);
- } catch (QueryException e) {
- e.printStackTrace();
- }
- System.out.println(filter.getRequestMap());
-
}
- @Override
- public Map<String, Object> getRequestMap() {
- return requestMap;
- }
+
+ private String interpretMatch(String match) {
+ String out = null;
+ switch (match) {
+ case "<":
+ out = "lt";
+ break;
+ case ">":
+ out = "gt";
+ break;
+ case "<=":
+ out = "leq";
+ break;
+ case ">=":
+ out = "geq";
+ break;
+ case "=":
+ out = "eq";
+ break;
+ case "!=":
+ out = "ne";
+ break;
+ }
+ return out;
+ }
+
+ private String invertInequation(String op) {
+ String inv = null;
+ switch (op) {
+ case "lt":
+ inv = "gt";
+ break;
+ case "leq":
+ inv = "geq";
+ break;
+ case "gt":
+ inv = "lt";
+ break;
+ case "geq":
+ inv = "leq";
+ break;
+ }
+ return inv;
+ }
+
+ private void putIntoSuperObject(LinkedHashMap<String, Object> object) {
+ putIntoSuperObject(object, 0);
+ }
+
+ @SuppressWarnings({"unchecked"})
+ private void putIntoSuperObject(LinkedHashMap<String, Object> object, int objStackPosition) {
+ if (objectStack.size() > objStackPosition) {
+ ArrayList<Object> topObjectOperands = (ArrayList<Object>) objectStack.get(objStackPosition).get("operands");
+ topObjectOperands.add(0, object);
+
+ } else {
+ requestMap.put("query", object);
+ }
+ }
+
+ private ParserRuleContext parseCollectionQuery(String p) throws QueryException {
+ Lexer collectionQueryLexer = new CollectionQueryLexer((CharStream) null);
+ ParserRuleContext tree = null;
+ // Like p. 111
+ try {
+
+ // Tokenize input data
+ ANTLRInputStream input = new ANTLRInputStream(p);
+ collectionQueryLexer.setInputStream(input);
+ CommonTokenStream tokens = new CommonTokenStream(collectionQueryLexer);
+ parser = new CollectionQueryParser(tokens);
+
+ // Don't throw out erroneous stuff
+ parser.setErrorHandler(new BailErrorStrategy());
+ parser.removeErrorListeners();
+ // Get starting rule from parser
+ Method startRule = CollectionQueryParser.class.getMethod("start");
+ tree = (ParserRuleContext) startRule.invoke(parser, (Object[]) null);
+
+ }
+ // Some things went wrong ...
+ catch (Exception e) {
+ System.err.println(e.getMessage());
+ }
+ if (tree == null) {
+ throw new QueryException("Could not parse expert filter query. Make sure it is correct syntax.");
+ }
+ // Return the generated tree
+ return tree;
+ }
+
+ public static void main(String[] args) {
+ String query = "foo=bar&c=d";
+ query = "(1990<year<2010&genre=Sport)|textClass=politk";
+ query = "(textClass=wissenschaft & textClass=politik) | textClass=ausland";
+ query = "1990<year<2010 & genre=Sport";
+ ExpertFilter filter = new ExpertFilter();
+// filter.verbose = true;
+ try {
+ filter.process(query);
+ } catch (QueryException e) {
+ e.printStackTrace();
+ }
+ System.out.println(filter.getRequestMap());
+
+ }
+
+ @Override
+ public Map<String, Object> getRequestMap() {
+ return requestMap;
+ }
}
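
The ExpertFilter changes above are re-indentation plus a switch-based rewrite of interpretMatch/invertInequation; behaviour is unchanged. For orientation, a usage sketch along the lines of the main() method, with comments describing how a range expression is serialized:

    ExpertFilter filter = new ExpertFilter();
    try {
        // "1990<year<2010" yields an "and" term group: year gt 1990 (first inequation inverted)
        // and year lt 2010; "genre=Sport" becomes a plain term with match:eq.
        filter.process("1990<year<2010 & genre=Sport");
    } catch (QueryException e) {
        e.printStackTrace();
    }
    Map<String, Object> request = filter.getRequestMap();
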
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQuery.java b/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java
similarity index 79%
rename from src/main/java/de/ids_mannheim/korap/query/serialize/MetaQuery.java
rename to src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java
index 0eb790d..b9fe4cc 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQuery.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/MetaQueryBuilder.java
@@ -9,15 +9,15 @@
* @author hanl
* @date 07/02/2014
*/
-public class MetaQuery {
+public class MetaQueryBuilder {
private Map meta;
- public MetaQuery() {
+ public MetaQueryBuilder() {
this.meta = new LinkedHashMap();
}
- public MetaQuery addContext(Integer left, String leftType,
+ public MetaQueryBuilder addContext(Integer left, String leftType,
Integer right, String rightType) {
Map map = new LinkedHashMap();
List l = new LinkedList();
@@ -32,7 +32,7 @@
return this;
}
- public MetaQuery addEntry(String name, Object value) {
+ public MetaQueryBuilder addEntry(String name, Object value) {
meta.put(name, value);
return this;
}
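
MetaQueryBuilder mirrors the collection builder's fluent style; a minimal sketch (the context size/type arguments are placeholders):

    MetaQueryBuilder meta = new MetaQueryBuilder()
            .addContext(20, "token", 20, "token")  // left size/type, right size/type
            .addEntry("startIndex", 0)
            .addEntry("count", 25);
    // QuerySerializer.setMeta(MetaQueryBuilder) consumes meta.raw(), see below
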
diff --git a/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java b/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java
index fbe0af0..e3162c1 100644
--- a/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java
+++ b/src/main/java/de/ids_mannheim/korap/query/serialize/QuerySerializer.java
@@ -22,9 +22,6 @@
private org.slf4j.Logger log = LoggerFactory
.getLogger(QuerySerializer.class);
- public QuerySerializer() {
-// mapper.enable(SerializationFeature.INDENT_OUTPUT);
- }
/**
* @param args
@@ -167,7 +164,7 @@
public QuerySerializer setMeta(
String cli, String cri, int cls, int crs,
int num, int pageIndex) {
- MetaQuery meta = new MetaQuery();
+ MetaQueryBuilder meta = new MetaQueryBuilder();
meta.addContext(cls, cli, crs, cri);
meta.addEntry("startIndex", pageIndex);
meta.addEntry("count", num);
@@ -175,19 +172,19 @@
return this;
}
- public QuerySerializer setMeta(MetaQuery meta) {
+ public QuerySerializer setMeta(MetaQueryBuilder meta) {
this.meta = meta.raw();
return this;
}
public QuerySerializer setCollection(String collection) {
- CollectionQuery qobj = new CollectionQuery();
+ CollectionQueryBuilder qobj = new CollectionQueryBuilder();
qobj.addResource(collection);
this.collection = qobj.raw();
return this;
}
- public QuerySerializer setCollection(CollectionQuery collections) {
+ public QuerySerializer setCollection(CollectionQueryBuilder collections) {
this.collection = collections.raw();
return this;
}
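
Putting the renamed builders together on the QuerySerializer side, a rough sketch (assumes QuerySerializer still has a usable no-arg constructor after the removal above):

    QuerySerializer serializer = new QuerySerializer();
    serializer.setCollection(new CollectionQueryBuilder().addMetaFilter("corpusID", "WPD"))
              .setMeta(new MetaQueryBuilder().addEntry("count", 25));
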
diff --git a/src/test/java/MetaQuerySerializationTest.java b/src/test/java/MetaQuerySerializationTest.java
index f8dc4e6..7bf8784 100644
--- a/src/test/java/MetaQuerySerializationTest.java
+++ b/src/test/java/MetaQuerySerializationTest.java
@@ -1,9 +1,8 @@
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
-import de.ids_mannheim.korap.query.serialize.CollectionQuery;
+import de.ids_mannheim.korap.query.serialize.CollectionQueryBuilder;
import de.ids_mannheim.korap.query.serialize.QuerySerializer;
-import de.ids_mannheim.korap.resource.Relation;
import de.ids_mannheim.korap.util.QueryException;
import org.junit.Assert;
import org.junit.Test;
@@ -30,7 +29,7 @@
b.append(" AND ");
b.append("textClass:wissenschaft");
// CollectionQuery qu = new CollectionQuery().addMetaFilterQuery(b.toString());
- CollectionQuery query = new CollectionQuery().addMetaFilterQuery(b.toString());
+ CollectionQueryBuilder query = new CollectionQueryBuilder().addMetaFilterQuery(b.toString());
System.out.println(query.buildString());
System.out.println(query.toCollections());
// System.out.println("value reference " + qu.stringify());
@@ -39,7 +38,7 @@
@Test
public void testSingle() throws IOException {
- CollectionQuery query = new CollectionQuery().addMetaFilter("textClass", "wissenschaft");
+ CollectionQueryBuilder query = new CollectionQueryBuilder().addMetaFilter("textClass", "wissenschaft");
// System.out.println("------ TEXT SINGLE " + query.stringify());
System.out.println(query.buildString());
}
@@ -52,7 +51,7 @@
b.append("pubDate:<2012-04-01");
b.append(" AND ");
b.append("author:Goethe");
- CollectionQuery query = new CollectionQuery().addMetaFilterQuery(b.toString());
+ CollectionQueryBuilder query = new CollectionQueryBuilder().addMetaFilterQuery(b.toString());
System.out.println("value until/since : " + query.buildString());
System.out.println("meta value until/since " + query.toCollections());
Assert.assertEquals("[{\"@type\":\"korap:meta-filter\",\"@value\":{\"@type\":\"korap:group\",\"relation\":\"and\",\"operands\":[{\"@type\":\"korap:term\",\"@field\":\"korap:field#author\",\"@value\":\"Goethe\"},{\"@type\":\"korap:group\",\"@field\":\"korap:field#pubDate\",\"relation\":\"between\",\"operands\":[{\"@type\":\"korap:date\",\"@value\":\"2012-04-01\"},{\"@type\":\"korap:date\",\"@value\":\"2013-04-01\"}]}]}}]", query.buildString());
@@ -65,7 +64,7 @@
b.append("pubDate:>2013-12-10");
b.append(" AND ");
b.append("author:Hesse");
- CollectionQuery query = new CollectionQuery().addMetaFilterQuery(b.toString());
+ CollectionQueryBuilder query = new CollectionQueryBuilder().addMetaFilterQuery(b.toString());
System.out.println("Running date check (until) with additional attribute author");
Assert.assertEquals("[{\"@type\":\"korap:meta-filter\",\"@value\":{\"@type\":\"korap:group\",\"relation\":\"and\",\"operands\":[{\"@type\":\"korap:term\",\"@field\":\"korap:field#author\",\"@value\":\"Hesse\"},{\"@type\":\"korap:group\",\"@field\":\"korap:field#pubDate\",\"relation\":\"until\",\"operands\":[{\"@type\":\"korap:date\",\"@value\":\"2013-12-10\"}]}]}}]", query.buildString());
// System.out.println("value until : " + query.stringify());
@@ -78,7 +77,7 @@
b.append("pubDate:<2013-12-10");
b.append(" AND ");
b.append("author:Kafka");
- CollectionQuery query = new CollectionQuery().addMetaFilterQuery(b.toString());
+ CollectionQueryBuilder query = new CollectionQueryBuilder().addMetaFilterQuery(b.toString());
System.out.println("value since : " + query.buildString());
System.out.println("meta value since " + query.toCollections());
// System.out.println();
@@ -126,7 +125,7 @@
b.append("pubDate:<2013-12-10");
b.append(" AND ");
b.append("author:Kafka");
- CollectionQuery q = new CollectionQuery().addMetaFilterQuery(b.toString());
+ CollectionQueryBuilder q = new CollectionQueryBuilder().addMetaFilterQuery(b.toString());
q.addMetaExtend("author", "Hesse");
System.out.println("--- ALL " + q.buildString());
@@ -140,14 +139,14 @@
b.append("pubDate:<2013-12-10");
b.append(" AND ");
b.append("author:Kafka");
- CollectionQuery q = new CollectionQuery().addMetaExtendQuery(b.toString());
+ CollectionQueryBuilder q = new CollectionQueryBuilder().addMetaExtendQuery(b.toString());
System.out.println("array repres " + q.buildString());
System.out.println();
}
@Test
public void testCollections() throws IOException {
- CollectionQuery q = new CollectionQuery().addMetaFilter("corpusID", "A00");
+ CollectionQueryBuilder q = new CollectionQueryBuilder().addMetaFilter("corpusID", "A00");
q.addMetaExtend("corpusID", "A01");
System.out.println("results stringified " + q.buildString());
@@ -164,14 +163,14 @@
@Test
public void testResources() throws IOException {
String meta = "[{\"@type\":\"korap:meta-filter\",\"@value\":{\"@type\":\"korap:term\",\"@field\":\"korap:field#corpusID\",\"@value\":\"WPD\"}}]";
- CollectionQuery q = new CollectionQuery().addResource(meta);
+ CollectionQueryBuilder q = new CollectionQueryBuilder().addResource(meta);
System.out.println("Testing Resource Meta data");
org.junit.Assert.assertEquals("{\"collections\":" + meta + "}", q.toCollections());
}
@Test
public void testA00() throws IOException {
- CollectionQuery q = new CollectionQuery().addMetaExtend("corpusID", "A00").addMetaExtend("corpusID", "A01");
+ CollectionQueryBuilder q = new CollectionQueryBuilder().addMetaExtend("corpusID", "A00").addMetaExtend("corpusID", "A01");
System.out.println("A meta: " + q.buildString());
System.out.println();
}
@@ -179,7 +178,7 @@
@Test
public void testResources2() throws IOException {
String meta = "[{\"@type\":\"korap:meta-filter\",\"@value\":{\"@type\":\"korap:term\",\"@field\":\"korap:field#corpusID\",\"@value\":\"WPD\"}}]";
- CollectionQuery q = new CollectionQuery().addResource(meta);
+ CollectionQueryBuilder q = new CollectionQueryBuilder().addResource(meta);
q.addMetaFilter("textClass", "wissenschaft");
System.out.println("stringified meta " + q.buildString());
System.out.println("meta string " + q.toCollections());