Support paging for scan api (#428)
implemented: #360

Change-Id: Idea152b3d53ef519c7ed9847ee9e65092b264e08
zhoney authored Apr 2, 2019
1 parent 582793d commit c4ca312
Showing 11 changed files with 127 additions and 19 deletions.
2 changes: 1 addition & 1 deletion hugegraph-api/pom.xml
@@ -86,7 +86,7 @@
           </addDefaultSpecificationEntries>
         </manifest>
         <manifestEntries>
-          <Implementation-Version>0.35.0.0</Implementation-Version>
+          <Implementation-Version>0.36.0.0</Implementation-Version>
         </manifestEntries>
       </archive>
     </configuration>
EdgesAPI.java
@@ -23,6 +23,7 @@
 import java.util.List;
 
 import javax.inject.Singleton;
+import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
@@ -47,6 +48,8 @@
 import com.baidu.hugegraph.util.Log;
 import com.codahale.metrics.annotation.Timed;
 
+import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PAGE_LIMIT;
+
 @Path("graphs/{graph}/traversers/edges")
 @Singleton
 public class EdgesAPI extends API {
@@ -101,16 +104,23 @@ public String shards(@Context GraphManager manager,
     public String scan(@Context GraphManager manager,
                        @PathParam("graph") String graph,
                        @QueryParam("start") String start,
-                       @QueryParam("end") String end) {
-        LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}) ",
-                  graph, start, end);
+                       @QueryParam("end") String end,
+                       @QueryParam("page") String page,
+                       @QueryParam("page_limit")
+                       @DefaultValue(DEFAULT_PAGE_LIMIT) long pageLimit) {
+        LOG.debug("Graph [{}] query edges by shard(start: {}, end: {}, " +
+                  "page: {}) ", graph, start, end, page);
 
         HugeGraph g = graph(manager, graph);
 
         ConditionQuery query = new ConditionQuery(HugeType.EDGE_OUT);
         query.scan(start, end);
+        query.page(page);
+        if (query.paging()) {
+            query.limit(pageLimit);
+        }
         Iterator<Edge> edges = g.edges(query);
 
-        return manager.serializer(g).writeEdges(edges, false);
+        return manager.serializer(g).writeEdges(edges, query.paging());
     }
 }
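For orientation, a minimal client-side sketch of how this paged scan endpoint could be consumed; it is not part of the commit. The server address, graph name, shard bounds, and the regex-based JSON handling are illustrative assumptions. The loop re-issues the request with the page token returned by each response until no token comes back; the vertices endpoint below pages the same way.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ScanEdgesPagingExample {

    // Hypothetical server and shard bounds; adjust to a real deployment
    private static final String ENDPOINT =
            "http://127.0.0.1:8080/apis/graphs/hugegraph/traversers/edges/scan";

    // Matches the "page" field the serializer appends to each page
    private static final Pattern PAGE_FIELD =
            Pattern.compile("\"page\"\\s*:\\s*\"([^\"]*)\"");

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String page = ""; // an empty token requests the first page
        while (page != null) {
            URI uri = URI.create(ENDPOINT + "?start=0&end=1000" +
                                 "&page_limit=10000&page=" + page);
            HttpRequest request = HttpRequest.newBuilder(uri).GET().build();
            HttpResponse<String> response =
                    client.send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body());
            // No quoted "page" field in the response means the scan is
            // exhausted (real tokens may need URL-encoding before reuse)
            Matcher m = PAGE_FIELD.matcher(response.body());
            page = m.find() ? m.group(1) : null;
        }
    }
}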
VerticesAPI.java
@@ -23,6 +23,7 @@
 import java.util.List;
 
 import javax.inject.Singleton;
+import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
@@ -47,6 +48,8 @@
 import com.baidu.hugegraph.util.Log;
 import com.codahale.metrics.annotation.Timed;
 
+import static com.baidu.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PAGE_LIMIT;
+
 @Path("graphs/{graph}/traversers/vertices")
 @Singleton
 public class VerticesAPI extends API {
@@ -101,16 +104,23 @@ public String shards(@Context GraphManager manager,
     public String scan(@Context GraphManager manager,
                        @PathParam("graph") String graph,
                        @QueryParam("start") String start,
-                       @QueryParam("end") String end) {
-        LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}) ",
-                  graph, start, end);
+                       @QueryParam("end") String end,
+                       @QueryParam("page") String page,
+                       @QueryParam("page_limit")
+                       @DefaultValue(DEFAULT_PAGE_LIMIT) long pageLimit) {
+        LOG.debug("Graph [{}] query vertices by shard(start: {}, end: {}, " +
+                  "page: {}) ", graph, start, end, page);
 
         HugeGraph g = graph(manager, graph);
 
         ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
         query.scan(start, end);
+        query.page(page);
+        if (query.paging()) {
+            query.limit(pageLimit);
+        }
         Iterator<Vertex> vertices = g.vertices(query);
 
-        return manager.serializer(g).writeVertices(vertices, false);
+        return manager.serializer(g).writeVertices(vertices, query.paging());
     }
 }
JsonSerializer.java
@@ -36,6 +36,7 @@
 import com.baidu.hugegraph.api.API;
 import com.baidu.hugegraph.backend.id.Id;
 import com.baidu.hugegraph.backend.store.Shard;
+import com.baidu.hugegraph.iterator.Metadatable;
 import com.baidu.hugegraph.schema.EdgeLabel;
 import com.baidu.hugegraph.schema.IndexLabel;
 import com.baidu.hugegraph.schema.PropertyKey;
@@ -100,7 +101,16 @@ private String writeIterator(String label, Iterator<?> itor,
 
         // Write page
         if (paging) {
-            String page = TraversalUtil.page((GraphTraversal<?, ?>) itor);
+            String page;
+            if (itor instanceof GraphTraversal<?, ?>) {
+                page = TraversalUtil.page((GraphTraversal<?, ?>) itor);
+            } else if (itor instanceof Metadatable) {
+                page = (String) ((Metadatable) itor).metadata("page");
+            } else {
+                throw new HugeException(
+                          "Error type '%s' of paging iterator '%s'",
+                          itor.getClass(), itor);
+            }
             if (page != null) {
                 page = String.format(",\"page\": \"%s\"", page);
             } else {
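The serializer can now pull a page token from any iterator that implements Metadatable, not only from Gremlin traversals. Below is a minimal sketch of an iterator honoring that contract; the interface shape is assumed from its call sites here (only metadata("page") is exercised by this commit), and the class is purely illustrative.

import java.util.Iterator;
import java.util.List;

// Assumed shape of the contract, inferred from its call sites
interface Metadatable {
    Object metadata(String meta, Object... args);
}

// Toy paged iterator: serves one fixed page and reports the next token
class PagedListIterator<T> implements Iterator<T>, Metadatable {

    private final Iterator<T> delegate;
    private final String nextPage; // null when there are no more pages

    PagedListIterator(List<T> page, String nextPage) {
        this.delegate = page.iterator();
        this.nextPage = nextPage;
    }

    @Override
    public boolean hasNext() {
        return this.delegate.hasNext();
    }

    @Override
    public T next() {
        return this.delegate.next();
    }

    @Override
    public Object metadata(String meta, Object... args) {
        if ("page".equals(meta)) {
            return this.nextPage;
        }
        throw new IllegalArgumentException("Invalid metadata: " + meta);
    }
}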
ApiVersion.java
@@ -75,14 +75,17 @@ public final class ApiVersion {
      * [0.29] Issue-39: Add rays and rings RESTful API
      * [0.30] Issue-32: Change index create API to return indexLabel and task id
      * [0.31] Issue-182: Support restore graph in restoring and merging mode
+     *
+     * version 0.9:
      * [0.32] Issue-250: Keep depth and degree consistent for traverser api
      * [0.33] Issue-305: Implement customized paths and crosspoints RESTful API
      * [0.34] Issue-307: Let VertexAPI use simplified property serializer
      * [0.35] Issue-287: Support pagination when do index query
+     * [0.36] Issue-360: Support paging for scan api
      */
 
     // The second parameter of Version.of() is for IDE running without JAR
-    public static final Version VERSION = Version.of(ApiVersion.class, "0.35");
+    public static final Version VERSION = Version.of(ApiVersion.class, "0.36");
 
     public static final void check() {
         // Check version of hugegraph-core. Firstly do check from version 0.3
CachedGraphTransaction.java
@@ -105,8 +105,8 @@ private Iterator<HugeVertex> queryVerticesByIds(IdQuery query) {
 
     @Override
     protected Iterator<HugeEdge> queryEdgesFromBackend(Query query) {
-        if (query.empty()) {
-            // Query all edges, don't cache it
+        if (query.empty() || query.paging()) {
+            // Query all edges or query edges in paging, don't cache it
             return super.queryEdgesFromBackend(query);
         }
 
HugeTraverser.java
@@ -66,6 +66,9 @@ public class HugeTraverser {
     public static final String DEFAULT_SAMPLE = "100";
     public static final String DEFAULT_WEIGHT = "0";
 
+    // Empirical value of scan limit, with which results can be returned in 3s
+    public static final String DEFAULT_PAGE_LIMIT = "100000";
+
     public static final long NO_LIMIT = -1L;
 
     public HugeTraverser(HugeGraph graph) {
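A note on the constant's type: DEFAULT_PAGE_LIMIT is declared as a String rather than a long because the REST layer above feeds it to JAX-RS @DefaultValue, which only accepts compile-time String constants; the JAX-RS runtime then converts the value into the long pageLimit parameter at request time.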
HbaseTable.java
@@ -203,14 +203,21 @@ protected RowIterator queryByCond(Session session, ConditionQuery query) {
                         "Invalid scan with multi conditions: %s", query);
             Relation scan = query.relations().iterator().next();
             Shard shard = (Shard) scan.value();
-            return this.queryByRange(session, shard);
+            return this.queryByRange(session, shard, query.page());
         }
         throw new NotSupportException("query: %s", query);
     }
 
-    protected RowIterator queryByRange(Session session, Shard shard) {
+    protected RowIterator queryByRange(Session session, Shard shard,
+                                       String page) {
         byte[] start = this.shardSpliter.position(shard.start());
         byte[] end = this.shardSpliter.position(shard.end());
+        if (page != null && !page.isEmpty()) {
+            byte[] position = PageState.fromString(page).position();
+            E.checkArgument(Bytes.compare(position, start) >= 0,
+                            "Invalid page out of lower bound");
+            start = position;
+        }
         return session.scan(this.table(), start, end);
     }
 
RocksDBTable.java
@@ -42,6 +42,7 @@
 import com.baidu.hugegraph.exception.NotSupportException;
 import com.baidu.hugegraph.iterator.ExtendableIterator;
 import com.baidu.hugegraph.type.HugeType;
+import com.baidu.hugegraph.util.Bytes;
 import com.baidu.hugegraph.util.E;
 import com.baidu.hugegraph.util.Log;
 import com.google.common.collect.ImmutableList;
@@ -193,14 +194,21 @@ protected BackendColumnIterator queryByCond(Session session,
                         "Invalid scan with multi conditions: %s", query);
             Relation scan = query.relations().iterator().next();
             Shard shard = (Shard) scan.value();
-            return this.queryByRange(session, shard);
+            return this.queryByRange(session, shard, query.page());
         }
         throw new NotSupportException("query: %s", query);
     }
 
-    protected BackendColumnIterator queryByRange(Session session, Shard shard) {
+    protected BackendColumnIterator queryByRange(Session session, Shard shard,
+                                                 String page) {
         byte[] start = this.shardSpliter.position(shard.start());
         byte[] end = this.shardSpliter.position(shard.end());
+        if (page != null && !page.isEmpty()) {
+            byte[] position = PageState.fromString(page).position();
+            E.checkArgument(Bytes.compare(position, start) >= 0,
+                            "Invalid page out of lower bound");
+            start = position;
+        }
         return session.scan(this.table(), start, end);
     }
 
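Both backends apply the same resume pattern. Here is a condensed, self-contained restatement of just that logic; the class wrapper is illustrative, while PageState, Bytes, and E are the HugeGraph utilities used in the hunks above.

import com.baidu.hugegraph.backend.page.PageState;
import com.baidu.hugegraph.util.Bytes;
import com.baidu.hugegraph.util.E;

final class ScanResumeSketch {

    // Clamp the scan start to the decoded page position, rejecting
    // tokens that fall before the shard's lower bound
    static byte[] resumePosition(byte[] shardStart, String page) {
        if (page == null || page.isEmpty()) {
            return shardStart; // first page: scan from the shard start
        }
        byte[] position = PageState.fromString(page).position();
        E.checkArgument(Bytes.compare(position, shardStart) >= 0,
                        "Invalid page out of lower bound");
        return position; // resume where the previous page stopped
    }
}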
EdgeCoreTest.java
@@ -22,6 +22,7 @@
 import java.util.Date;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.NoSuchElementException;
 import java.util.Set;
@@ -43,6 +44,7 @@
 import com.baidu.hugegraph.HugeGraph;
 import com.baidu.hugegraph.backend.BackendException;
 import com.baidu.hugegraph.backend.id.Id;
+import com.baidu.hugegraph.backend.page.PageState;
 import com.baidu.hugegraph.backend.query.ConditionQuery;
 import com.baidu.hugegraph.backend.query.Query;
 import com.baidu.hugegraph.backend.serializer.BytesBuffer;
@@ -51,6 +53,7 @@
 import com.baidu.hugegraph.config.CoreOptions;
 import com.baidu.hugegraph.exception.LimitExceedException;
 import com.baidu.hugegraph.exception.NotFoundException;
+import com.baidu.hugegraph.iterator.Metadatable;
 import com.baidu.hugegraph.schema.SchemaManager;
 import com.baidu.hugegraph.testutil.Assert;
 import com.baidu.hugegraph.testutil.FakeObjects.FakeEdge;
@@ -1745,6 +1748,32 @@ public void testScanEdge() {
         Assert.assertEquals(18, edges.size());
     }
 
+    @Test
+    public void testScanEdgeInPaging() {
+        HugeGraph graph = graph();
+        Assume.assumeTrue("Not support scan",
+                          storeFeatures().supportsScanToken() ||
+                          storeFeatures().supportsScanKeyRange());
+        init18Edges();
+
+        List<Edge> edges = new LinkedList<>();
+
+        ConditionQuery query = new ConditionQuery(HugeType.EDGE);
+        query.scan(String.valueOf(Long.MIN_VALUE),
+                   String.valueOf(Long.MAX_VALUE));
+        query.limit(1);
+        String page = PageState.PAGE_NONE;
+        while (page != null) {
+            query.page(page);
+            Iterator<Edge> iterator = graph.edges(query);
+            while (iterator.hasNext()) {
+                edges.add(iterator.next());
+            }
+            page = (String) ((Metadatable) iterator).metadata("page");
+        }
+        Assert.assertEquals(18, edges.size());
+    }
+
     @Test
     public void testRemoveEdge() {
         HugeGraph graph = graph();
@@ -2562,7 +2591,7 @@ public void testQueryEdgeByPageResultsMatched() {
 
         GraphTraversal<Edge, Edge> itor;
 
-        String page = "";
+        String page = PageState.PAGE_NONE;
         int size = 20;
 
         for (int i = 0; i < 100 / size; i++) {
VertexCoreTest.java
@@ -46,12 +46,14 @@
 import com.baidu.hugegraph.backend.id.IdGenerator;
 import com.baidu.hugegraph.backend.id.SnowflakeIdGenerator;
 import com.baidu.hugegraph.backend.id.SplicingIdGenerator;
+import com.baidu.hugegraph.backend.page.PageState;
 import com.baidu.hugegraph.backend.query.ConditionQuery;
 import com.baidu.hugegraph.backend.query.Query;
 import com.baidu.hugegraph.backend.store.BackendFeatures;
 import com.baidu.hugegraph.backend.store.Shard;
 import com.baidu.hugegraph.backend.tx.GraphTransaction;
 import com.baidu.hugegraph.exception.NoIndexException;
+import com.baidu.hugegraph.iterator.Metadatable;
 import com.baidu.hugegraph.schema.PropertyKey;
 import com.baidu.hugegraph.schema.SchemaManager;
 import com.baidu.hugegraph.schema.VertexLabel;
@@ -3008,6 +3010,32 @@ public void testScanVertex() {
         Assert.assertEquals(10, vertexes.size());
     }
 
+    @Test
+    public void testScanVertexInPaging() {
+        HugeGraph graph = graph();
+        Assume.assumeTrue("Not support scan",
+                          storeFeatures().supportsScanToken() ||
+                          storeFeatures().supportsScanKeyRange());
+        init10Vertices();
+
+        List<Vertex> vertexes = new LinkedList<>();
+
+        ConditionQuery query = new ConditionQuery(HugeType.VERTEX);
+        query.scan(String.valueOf(Long.MIN_VALUE),
+                   String.valueOf(Long.MAX_VALUE));
+        query.limit(1);
+        String page = PageState.PAGE_NONE;
+        while (page != null) {
+            query.page(page);
+            Iterator<Vertex> iterator = graph.vertices(query);
+            while (iterator.hasNext()) {
+                vertexes.add(iterator.next());
+            }
+            page = (String) ((Metadatable) iterator).metadata("page");
+        }
+        Assert.assertEquals(10, vertexes.size());
+    }
+
     @Test
     public void testScanVertexWithSplitSizeLt1MB() {
         HugeGraph graph = graph();
@@ -3250,7 +3278,7 @@ public void testQueryByPageResultsMatched() {
 
         GraphTraversal<Vertex, Vertex> itor;
 
-        String page = "";
+        String page = PageState.PAGE_NONE;
         int size = 20;
 
         for (int i = 0; i < 100 / size; i++) {
