Support paging for scan api
implemented: #360

Change-Id: Idea152b3d53ef519c7ed9847ee9e65092b264e08
zhoney committed Mar 27, 2019
1 parent c50c34a commit 607b9cb
Showing 10 changed files with 108 additions and 17 deletions.
2 changes: 1 addition & 1 deletion hugegraph-api/pom.xml
@@ -86,7 +86,7 @@
</addDefaultSpecificationEntries>
</manifest>
<manifestEntries>
<Implementation-Version>0.35.0.0</Implementation-Version>
<Implementation-Version>0.36.0.0</Implementation-Version>
</manifestEntries>
</archive>
</configuration>
@@ -38,6 +38,7 @@
import com.baidu.hugegraph.api.filter.CompressInterceptor.Compress;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.ConditionQuery;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.core.GraphManager;
import com.baidu.hugegraph.server.RestServer;
@@ -101,16 +102,19 @@ public String shards(@Context GraphManager manager,
public String scan(@Context GraphManager manager,
@PathParam("graph") String graph,
@QueryParam("start") String start,
@QueryParam("end") String end) {
LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}) ",
graph, start, end);
@QueryParam("end") String end,
@QueryParam("page") String page) {
LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}, " +
"page: {}) ", graph, start, end, page);

HugeGraph g = graph(manager, graph);

ConditionQuery query = new ConditionQuery(HugeType.EDGE_OUT);
query.scan(start, end);
query.limit(Query.DEFAULT_CAPACITY);
query.page(page);
Iterator<Edge> edges = g.edges(query);

return manager.serializer(g).writeEdges(edges, false);
return manager.serializer(g).writeEdges(edges, true);
}
}
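
With the page parameter in place, a shard scan can be consumed page by page instead of in one bounded response. A minimal sketch of the resulting usage pattern, modelled on the tests added later in this commit; the shard bounds are illustrative, and HugeType, ConditionQuery, Query, Edge and Metadatable are the types already imported in the hunks above:

private static long scanAllEdges(HugeGraph graph) {
    // Scan the full shard range; real callers would pick bounds from the shards API
    ConditionQuery query = new ConditionQuery(HugeType.EDGE_OUT);
    query.scan(String.valueOf(Long.MIN_VALUE), String.valueOf(Long.MAX_VALUE));
    query.limit(Query.DEFAULT_CAPACITY);

    long count = 0;
    String page = "";                       // empty string requests the first page
    while (page != null) {
        query.page(page);
        Iterator<Edge> edges = graph.edges(query);
        while (edges.hasNext()) {
            edges.next();                   // consume the current page
            count++;
        }
        // the paged iterator is Metadatable; "page" yields the next token, null when exhausted
        page = (String) ((Metadatable) edges).metadata("page");
    }
    return count;
}
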
@@ -39,6 +39,7 @@
import com.baidu.hugegraph.api.graph.VertexAPI;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.ConditionQuery;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.core.GraphManager;
import com.baidu.hugegraph.server.RestServer;
@@ -101,16 +102,19 @@ public String shards(@Context GraphManager manager,
public String scan(@Context GraphManager manager,
@PathParam("graph") String graph,
@QueryParam("start") String start,
@QueryParam("end") String end) {
LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}) ",
graph, start, end);
@QueryParam("end") String end,
@QueryParam("page") String page) {
LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}, " +
"page: {}) ", graph, start, end, page);

HugeGraph g = graph(manager, graph);

ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
query.scan(start, end);
query.limit(Query.DEFAULT_CAPACITY);
query.page(page);
Iterator<Vertex> vertices = g.vertices(query);

return manager.serializer(g).writeVertices(vertices, false);
return manager.serializer(g).writeVertices(vertices, true);
}
}
@@ -36,6 +36,7 @@
import com.baidu.hugegraph.api.API;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.iterator.Metadatable;
import com.baidu.hugegraph.schema.EdgeLabel;
import com.baidu.hugegraph.schema.IndexLabel;
import com.baidu.hugegraph.schema.PropertyKey;
@@ -100,7 +101,16 @@ private String writeIterator(String label, Iterator<?> itor,

// Write page
if (paging) {
String page = TraversalUtil.page((GraphTraversal<?, ?>) itor);
String page;
if (itor instanceof GraphTraversal<?, ?>) {
page = TraversalUtil.page((GraphTraversal<?, ?>) itor);
} else if (itor instanceof Metadatable) {
page = (String) ((Metadatable) itor).metadata("page");
} else {
throw new HugeException(
"Error type '%s' of paging iterator '%s'",
itor.getClass(), itor);
}
if (page != null) {
page = String.format(",\"page\": \"%s\"", page);
} else {
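
For API clients, the effect of this branch is that a paged scan response carries the token needed to request the next page. A rough sketch of the body shape, assuming the label passed to writeIterator is "edges" and using a placeholder token value:

// {"edges": [ ...serialized edges... ], "page": "<next-page-token>"}
// when the scan is exhausted the token is absent or null (exact form depends on the truncated else-branch above)
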
@@ -75,14 +75,17 @@ public final class ApiVersion {
* [0.29] Issue-39: Add rays and rings RESTful API
* [0.30] Issue-32: Change index create API to return indexLabel and task id
* [0.31] Issue-182: Support restore graph in restoring and merging mode
*
* version 0.9:
* [0.32] Issue-250: Keep depth and degree consistent for traverser api
* [0.33] Issue-305: Implement customized paths and crosspoints RESTful API
* [0.34] Issue-307: Let VertexAPI use simplified property serializer
* [0.35] Issue-287: Support pagination when do index query
* [0.36] Issue-360: Support paging for scan api
*/

// The second parameter of Version.of() is for IDE running without JAR
public static final Version VERSION = Version.of(ApiVersion.class, "0.35");
public static final Version VERSION = Version.of(ApiVersion.class, "0.36");

public static final void check() {
// Check version of hugegraph-core. Firstly do check from version 0.3
@@ -105,8 +105,8 @@ private Iterator<HugeVertex> queryVerticesByIds(IdQuery query) {

@Override
protected Iterator<HugeEdge> queryEdgesFromBackend(Query query) {
if (query.empty()) {
// Query all edges, don't cache it
if (query.empty() || query.paging()) {
// Query all edges or query edges in paging, don't cache it
return super.queryEdgesFromBackend(query);
}

@@ -203,14 +203,21 @@ protected RowIterator queryByCond(Session session, ConditionQuery query) {
"Invalid scan with multi conditions: %s", query);
Relation scan = query.relations().iterator().next();
Shard shard = (Shard) scan.value();
return this.queryByRange(session, shard);
return this.queryByRange(session, shard, query.page());
}
throw new NotSupportException("query: %s", query);
}

protected RowIterator queryByRange(Session session, Shard shard) {
protected RowIterator queryByRange(Session session, Shard shard,
String page) {
byte[] start = this.shardSpliter.position(shard.start());
byte[] end = this.shardSpliter.position(shard.end());
if (page != null && !page.isEmpty()) {
byte[] position = PageState.fromString(page).position();
E.checkArgument(Bytes.compare(position, start) >= 0,
"Invalid page out of lower bound");
start = position;
}
return session.scan(this.table(), start, end);
}

@@ -42,6 +42,7 @@
import com.baidu.hugegraph.exception.NotSupportException;
import com.baidu.hugegraph.iterator.ExtendableIterator;
import com.baidu.hugegraph.type.HugeType;
import com.baidu.hugegraph.util.Bytes;
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.Log;
import com.google.common.collect.ImmutableList;
@@ -193,14 +194,21 @@ protected BackendColumnIterator queryByCond(Session session,
"Invalid scan with multi conditions: %s", query);
Relation scan = query.relations().iterator().next();
Shard shard = (Shard) scan.value();
return this.queryByRange(session, shard);
return this.queryByRange(session, shard, query.page());
}
throw new NotSupportException("query: %s", query);
}

protected BackendColumnIterator queryByRange(Session session, Shard shard) {
protected BackendColumnIterator queryByRange(Session session, Shard shard,
String page) {
byte[] start = this.shardSpliter.position(shard.start());
byte[] end = this.shardSpliter.position(shard.end());
if (page != null && !page.isEmpty()) {
byte[] position = PageState.fromString(page).position();
E.checkArgument(Bytes.compare(position, start) >= 0,
"Invalid page out of lower bound");
start = position;
}
return session.scan(this.table(), start, end);
}

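
Both backend tables narrow a resumed range scan in the same way: when a page token is supplied, its decoded position replaces the shard's start, guarded by a lower-bound check. A small illustration of that guard with assumed byte positions; not part of the diff, and Bytes.compare and E.checkArgument are the project utilities imported above:

byte[] start = new byte[]{0x00, 0x10};       // shard start position (assumed value)
byte[] position = new byte[]{0x00, 0x42};    // position decoded from the page token (assumed value)
// reject tokens that point before the shard's lower bound
E.checkArgument(Bytes.compare(position, start) >= 0,
                "Invalid page out of lower bound");
start = position;                            // the next scan resumes where the last page ended
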
@@ -22,6 +22,7 @@
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
@@ -51,6 +52,7 @@
import com.baidu.hugegraph.config.CoreOptions;
import com.baidu.hugegraph.exception.LimitExceedException;
import com.baidu.hugegraph.exception.NotFoundException;
import com.baidu.hugegraph.iterator.Metadatable;
import com.baidu.hugegraph.schema.SchemaManager;
import com.baidu.hugegraph.testutil.Assert;
import com.baidu.hugegraph.testutil.FakeObjects.FakeEdge;
@@ -1745,6 +1747,32 @@ public void testScanEdge() {
Assert.assertEquals(18, edges.size());
}

@Test
public void testScanEdgeInPaging() {
HugeGraph graph = graph();
Assume.assumeTrue("Not support scan",
storeFeatures().supportsScanToken() ||
storeFeatures().supportsScanKeyRange());
init18Edges();

List<Edge> edges = new LinkedList<>();

ConditionQuery query = new ConditionQuery(HugeType.EDGE);
query.scan(String.valueOf(Long.MIN_VALUE),
String.valueOf(Long.MAX_VALUE));
query.limit(1);
String page = "";
while (page != null) {
query.page(page);
Iterator<Edge> iterator = graph.edges(query);
while (iterator.hasNext()) {
edges.add(iterator.next());
}
page = (String) ((Metadatable) iterator).metadata("page");
}
Assert.assertEquals(18, edges.size());
}

@Test
public void testRemoveEdge() {
HugeGraph graph = graph();
@@ -52,6 +52,7 @@
import com.baidu.hugegraph.backend.store.Shard;
import com.baidu.hugegraph.backend.tx.GraphTransaction;
import com.baidu.hugegraph.exception.NoIndexException;
import com.baidu.hugegraph.iterator.Metadatable;
import com.baidu.hugegraph.schema.PropertyKey;
import com.baidu.hugegraph.schema.SchemaManager;
import com.baidu.hugegraph.schema.VertexLabel;
@@ -3008,6 +3009,32 @@ public void testScanVertex() {
Assert.assertEquals(10, vertexes.size());
}

@Test
public void testScanVertexInPaging() {
HugeGraph graph = graph();
Assume.assumeTrue("Not support scan",
storeFeatures().supportsScanToken() ||
storeFeatures().supportsScanKeyRange());
init10Vertices();

List<Vertex> vertexes = new LinkedList<>();

ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
query.scan(String.valueOf(Long.MIN_VALUE),
String.valueOf(Long.MAX_VALUE));
query.limit(1);
String page = "";
while (page != null) {
query.page(page);
Iterator<Vertex> iterator = graph.vertices(query);
while (iterator.hasNext()) {
vertexes.add(iterator.next());
}
page = (String) ((Metadatable) iterator).metadata("page");
}
Assert.assertEquals(10, vertexes.size());
}

@Test
public void testScanVertexWithSplitSizeLt1MB() {
HugeGraph graph = graph();
