From 8179afac924a73ff52f4c896564003608515b46c Mon Sep 17 00:00:00 2001 From: DanGuge <77946882+DanGuge@users.noreply.github.com> Date: Sat, 19 Aug 2023 13:41:08 +0800 Subject: [PATCH 01/24] feat(api&core): in oltp apis, add statistics info and support full info about vertices and edges (#2262) * chore: improve gitignore file * feat: add ApiMeasure to collect runtime data ApiMeasure will count the number of vertices and edges traversed at runtime, and the time the api takes to execute * feat: Add ApiMeasure to JsonSerializer and Modify the Serializer interface * JsonSerializer: return measure information in api response * Serializer: fit the feature that returns complete information about vertices and edges * refactor: format code based on hugegraph-style.xml * feat: Add statistics information in all oltp restful apis response and Support full information about vertices and edges Statistics information: * add vertexIterCounter and edgeIterCounter in HugeTraverser.java to track traversed vertices and edges at run time * modify all oltp restful apis to add statistics information in response Full information about vertices and edges: * add 'with_vertex' and 'with_edge' parameter option in apis * modify oltp apis to support vertex and edge information in api response * add EdgeRecord in HugeTraverser.java to record edges at run time and generate the edge information returned in api response * modify Path and PathSet in HugeTraverser.java to support full edge information storage * modify all traversers to support track of edge information at run time * fix: numeric cast * fix: Jaccard Similarity api test * fix: adjust the code style and naming convention * Empty commit * Empty commit * fix: 1. change System.currentTimeMillis() to System.nanoTime(); 2. 
modify addCount() * fix: rollback change in .gitignore * fix: rollback ServerOptions.java code style * fix: rollback API.java code style and add exception in else branch * fix: fix code style * fix: name style & code style * rename edgeRecord to edgeResults * fix Request class code style in SameNeighborsAPI.java --- .../java/org/apache/hugegraph/api/API.java | 89 ++- .../api/traversers/AllShortestPathsAPI.java | 73 ++- .../api/traversers/CrosspointsAPI.java | 30 +- .../traversers/CustomizedCrosspointsAPI.java | 125 ++-- .../api/traversers/CustomizedPathsAPI.java | 103 ++-- .../hugegraph/api/traversers/EdgesAPI.java | 28 +- .../api/traversers/FusiformSimilarityAPI.java | 66 ++- .../api/traversers/JaccardSimilarityAPI.java | 55 +- .../api/traversers/KneighborAPI.java | 78 ++- .../hugegraph/api/traversers/KoutAPI.java | 89 ++- .../traversers/MultiNodeShortestPathAPI.java | 82 +-- .../hugegraph/api/traversers/PathsAPI.java | 102 ++-- .../hugegraph/api/traversers/RaysAPI.java | 67 ++- .../hugegraph/api/traversers/RingsAPI.java | 74 ++- .../api/traversers/SameNeighborsAPI.java | 108 +++- .../api/traversers/ShortestPathAPI.java | 67 ++- .../SingleSourceShortestPathAPI.java | 79 ++- .../api/traversers/TemplatePathsAPI.java | 104 ++-- .../hugegraph/api/traversers/VerticesAPI.java | 28 +- .../traversers/WeightedShortestPathAPI.java | 84 ++- .../hugegraph/config/ServerOptions.java | 2 +- .../apache/hugegraph/core/GraphManager.java | 4 + .../hugegraph/serializer/JsonSerializer.java | 153 +++-- .../hugegraph/serializer/Serializer.java | 24 +- .../comm/TriangleCountAlgorithm.java | 38 +- .../algorithm/CollectionPathsTraverser.java | 58 +- .../algorithm/CustomizePathsTraverser.java | 82 +-- .../CustomizedCrosspointsTraverser.java | 133 +++-- .../FusiformSimilarityTraverser.java | 67 ++- .../traversal/algorithm/HugeTraverser.java | 541 +++++++++++------- .../algorithm/JaccardSimilarTraverser.java | 49 +- .../algorithm/KneighborTraverser.java | 16 +- 
.../traversal/algorithm/KoutTraverser.java | 24 +- .../MultiNodeShortestPathTraverser.java | 117 ++-- .../traversal/algorithm/PathTraverser.java | 25 +- .../traversal/algorithm/PathsTraverser.java | 23 +- .../algorithm/SameNeighborTraverser.java | 57 +- .../algorithm/ShortestPathTraverser.java | 73 ++- .../SingleSourceShortestPathTraverser.java | 206 ++++--- .../algorithm/SubGraphTraverser.java | 175 +++--- .../algorithm/TemplatePathsTraverser.java | 58 +- .../records/ShortestPathRecords.java | 4 +- .../records/SingleWayMultiPathsRecords.java | 18 +- .../traversers/JaccardSimilarityApiTest.java | 12 +- 44 files changed, 2268 insertions(+), 1222 deletions(-) diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java index afaba499b3..99fe67e5ba 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java @@ -22,41 +22,39 @@ import java.util.concurrent.Callable; import java.util.function.Consumer; -import jakarta.ws.rs.ForbiddenException; -import jakarta.ws.rs.NotFoundException; -import jakarta.ws.rs.NotSupportedException; -import jakarta.ws.rs.core.MediaType; - +import org.apache.commons.lang.mutable.MutableLong; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.define.Checkable; import org.apache.hugegraph.metrics.MetricsUtil; -import org.slf4j.Logger; - -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + import com.codahale.metrics.Meter; import com.google.common.collect.ImmutableMap; -public class API { 
+import jakarta.ws.rs.ForbiddenException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.NotSupportedException; +import jakarta.ws.rs.core.MediaType; - protected static final Logger LOG = Log.logger(API.class); +public class API { public static final String CHARSET = "UTF-8"; - public static final String TEXT_PLAIN = MediaType.TEXT_PLAIN; public static final String APPLICATION_JSON = MediaType.APPLICATION_JSON; public static final String APPLICATION_JSON_WITH_CHARSET = APPLICATION_JSON + ";charset=" + CHARSET; public static final String JSON = MediaType.APPLICATION_JSON_TYPE .getSubtype(); - public static final String ACTION_APPEND = "append"; public static final String ACTION_ELIMINATE = "eliminate"; public static final String ACTION_CLEAR = "clear"; - + protected static final Logger LOG = Log.logger(API.class); private static final Meter SUCCEED_METER = MetricsUtil.registerMeter(API.class, "commit-succeed"); private static final Meter ILLEGAL_ARG_ERROR_METER = @@ -69,8 +67,7 @@ public class API { public static HugeGraph graph(GraphManager manager, String graph) { HugeGraph g = manager.graph(graph); if (g == null) { - throw new NotFoundException(String.format( - "Graph '%s' does not exist", graph)); + throw new NotFoundException(String.format("Graph '%s' does not exist", graph)); } return g; } @@ -140,8 +137,7 @@ protected static void checkUpdatingBody(Checkable body) { body.checkUpdate(); } - protected static void checkCreatingBody( - Collection bodies) { + protected static void checkCreatingBody(Collection bodies) { E.checkArgumentNotNull(bodies, "The request body can't be empty"); for (Checkable body : bodies) { E.checkArgument(body != null, @@ -150,8 +146,7 @@ protected static void checkCreatingBody( } } - protected static void checkUpdatingBody( - Collection bodies) { + protected static void checkUpdatingBody(Collection bodies) { E.checkArgumentNotNull(bodies, "The request body can't be empty"); for (Checkable body : bodies) { 
E.checkArgumentNotNull(body, @@ -186,8 +181,58 @@ public static boolean checkAndParseAction(String action) { } else if (action.equals(ACTION_ELIMINATE)) { return false; } else { - throw new NotSupportedException( - String.format("Not support action '%s'", action)); + throw new NotSupportedException(String.format("Not support action '%s'", action)); + } + } + + public static class ApiMeasurer { + + public static final String EDGE_ITER = "edge_iterations"; + public static final String VERTICE_ITER = "vertice_iterations"; + public static final String COST = "cost(ns)"; + private final long timeStart; + private final Map measures; + + public ApiMeasurer() { + this.timeStart = System.nanoTime(); + this.measures = InsertionOrderUtil.newMap(); + } + + public Map measures() { + measures.put(COST, System.nanoTime() - timeStart); + return measures; + } + + public void put(String key, String value) { + this.measures.put(key, value); + } + + public void put(String key, long value) { + this.measures.put(key, value); + } + + public void put(String key, int value) { + this.measures.put(key, value); + } + + protected void addCount(String key, long value) { + Object current = measures.get(key); + if (current == null) { + measures.put(key, new MutableLong(value)); + } else if (current instanceof MutableLong) { + ((MutableLong) measures.computeIfAbsent(key, MutableLong::new)).add(value); + } else if (current instanceof Long) { + Long currentLong = (Long) current; + measures.put(key, new MutableLong(currentLong + value)); + } else { + throw new NotSupportedException("addCount() method's 'value' datatype must be " + + "Long or MutableLong"); + } + } + + public void addIterCount(long verticeIters, long edgeIters) { + this.addCount(EDGE_ITER, edgeIters); + this.addCount(VERTICE_ITER, verticeIters); } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/AllShortestPathsAPI.java 
b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/AllShortestPathsAPI.java index 030c4e8cc1..e432f81ea7 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/AllShortestPathsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/AllShortestPathsAPI.java @@ -20,19 +20,10 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_CAPACITY; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; - -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.slf4j.Logger; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; @@ -44,9 +35,22 @@ import org.apache.hugegraph.traversal.algorithm.ShortestPathTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.google.common.collect.ImmutableList; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/allshortestpaths") @Singleton @Tag(name = "AllShortestPathsAPI") @@ -68,13 +72,20 @@ public String get(@Context GraphManager manager, @DefaultValue(DEFAULT_MAX_DEGREE) long maxDegree, @QueryParam("skip_degree") 
@DefaultValue("0") long skipDegree, + @QueryParam("with_vertex") + @DefaultValue("false") boolean withVertex, + @QueryParam("with_edge") + @DefaultValue("false") boolean withEdge, @QueryParam("capacity") @DefaultValue(DEFAULT_CAPACITY) long capacity) { LOG.debug("Graph [{}] get shortest path from '{}', to '{}' with " + "direction {}, edge label {}, max depth '{}', " + - "max degree '{}', skipped degree '{}' and capacity '{}'", + "max degree '{}', skipped degree '{}', capacity '{}', " + + "with_vertex '{}' and with_edge '{}'", graph, source, target, direction, edgeLabel, depth, - maxDegree, skipDegree, capacity); + maxDegree, skipDegree, capacity, withVertex, withEdge); + + ApiMeasurer measure = new ApiMeasurer(); Id sourceId = VertexAPI.checkAndParseVertexId(source); Id targetId = VertexAPI.checkAndParseVertexId(target); @@ -85,9 +96,35 @@ public String get(@Context GraphManager manager, ShortestPathTraverser traverser = new ShortestPathTraverser(g); List edgeLabels = edgeLabel == null ? 
ImmutableList.of() : ImmutableList.of(edgeLabel); - HugeTraverser.PathSet paths = traverser.allShortestPaths( - sourceId, targetId, dir, edgeLabels, - depth, maxDegree, skipDegree, capacity); - return manager.serializer(g).writePaths("paths", paths, false); + HugeTraverser.PathSet paths = traverser.allShortestPaths(sourceId, targetId, dir, + edgeLabels, depth, maxDegree, + skipDegree, capacity); + + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); + } + if (withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge; + Set edges = paths.getEdges(); + if (withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); + } + + return manager.serializer(g, measure.measures()) + .writePaths("paths", paths, false, + iterVertex, iterEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CrosspointsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CrosspointsAPI.java index 39de473b8f..eda042511c 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CrosspointsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CrosspointsAPI.java @@ -21,18 +21,6 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import 
jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.graph.EdgeAPI; @@ -43,8 +31,20 @@ import org.apache.hugegraph.traversal.algorithm.PathsTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/crosspoints") @Singleton @Tag(name = "CrosspointsAPI") @@ -74,6 +74,7 @@ public String get(@Context GraphManager manager, graph, source, target, direction, edgeLabel, depth, maxDegree, capacity, limit); + ApiMeasurer measure = new ApiMeasurer(); Id sourceId = VertexAPI.checkAndParseVertexId(source); Id targetId = VertexAPI.checkAndParseVertexId(target); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -84,6 +85,9 @@ public String get(@Context GraphManager manager, dir, edgeLabel, depth, maxDegree, capacity, limit); - return manager.serializer(g).writePaths("crosspoints", paths, true); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + return manager.serializer(g, measure.measures()) + .writePaths("crosspoints", paths, true); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedCrosspointsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedCrosspointsAPI.java index da35f7325f..cadbd2ce00 100644 --- 
a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedCrosspointsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedCrosspointsAPI.java @@ -22,38 +22,39 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.CustomizedCrosspointsTraverser; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; + 
@Path("graphs/{graph}/traversers/customizedcrosspoints") @Singleton @Tag(name = "CustomizedCrosspointsAPI") @@ -61,6 +62,21 @@ public class CustomizedCrosspointsAPI extends API { private static final Logger LOG = Log.logger(CustomizedCrosspointsAPI.class); + private static List pathPatterns( + HugeGraph graph, CrosspointsRequest request) { + int stepSize = request.pathPatterns.size(); + List pathPatterns = new ArrayList<>(stepSize); + for (PathPattern pattern : request.pathPatterns) { + CustomizedCrosspointsTraverser.PathPattern pathPattern = + new CustomizedCrosspointsTraverser.PathPattern(); + for (Step step : pattern.steps) { + pathPattern.add(step.jsonToStep(graph)); + } + pathPatterns.add(pathPattern); + } + return pathPatterns; + } + @POST @Timed @Consumes(APPLICATION_JSON) @@ -78,55 +94,56 @@ public String post(@Context GraphManager manager, "The steps of crosspoints request can't be empty"); LOG.debug("Graph [{}] get customized crosspoints from source vertex " + - "'{}', with path_pattern '{}', with_path '{}', with_vertex " + - "'{}', capacity '{}' and limit '{}'", graph, request.sources, - request.pathPatterns, request.withPath, request.withVertex, - request.capacity, request.limit); + "'{}', with path_pattern '{}', with path '{}', with_vertex " + + "'{}', capacity '{}', limit '{}' and with_edge '{}'", + graph, request.sources, request.pathPatterns, request.withPath, + request.withVertex, request.capacity, request.limit, request.withEdge); + + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Iterator sources = request.sources.vertices(g); - List patterns; - patterns = pathPatterns(g, request); CustomizedCrosspointsTraverser traverser = - new CustomizedCrosspointsTraverser(g); - CustomizedCrosspointsTraverser.CrosspointsPaths paths; - paths = traverser.crosspointsPaths(sources, patterns, request.capacity, - request.limit); - Iterator iter = QueryResults.emptyIterator(); - if (!request.withVertex) { - return 
manager.serializer(g).writeCrosspoints(paths, iter, - request.withPath); - } - Set ids = new HashSet<>(); + new CustomizedCrosspointsTraverser(g); + + List patterns = pathPatterns(g, request); + CustomizedCrosspointsTraverser.CrosspointsPaths paths = + traverser.crosspointsPaths(sources, patterns, request.capacity, request.limit); + + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + + Iterator iterVertex; + Set vertexIds = new HashSet<>(); if (request.withPath) { - for (HugeTraverser.Path p : paths.paths()) { - ids.addAll(p.vertices()); + for (HugeTraverser.Path path : paths.paths()) { + vertexIds.addAll(path.vertices()); } } else { - ids = paths.crosspoints(); + vertexIds = paths.crosspoints(); } - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); } - return manager.serializer(g).writeCrosspoints(paths, iter, - request.withPath); - } - private static List - pathPatterns(HugeGraph graph, CrosspointsRequest request) { - int stepSize = request.pathPatterns.size(); - List pathPatterns; - pathPatterns = new ArrayList<>(stepSize); - for (PathPattern pattern : request.pathPatterns) { - CustomizedCrosspointsTraverser.PathPattern pathPattern; - pathPattern = new CustomizedCrosspointsTraverser.PathPattern(); - for (Step step : pattern.steps) { - pathPattern.add(step.jsonToStep(graph)); + Iterator iterEdge = Collections.emptyIterator(); + if (request.withPath) { + Set edges = traverser.edgeResults().getEdges(paths.paths()); + if (request.withEdge) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } - pathPatterns.add(pathPattern); } - return pathPatterns; + + return manager.serializer(g, measure.measures()) + .writeCrosspoints(paths, iterVertex, + iterEdge, 
request.withPath); } private static class CrosspointsRequest { @@ -143,14 +160,16 @@ private static class CrosspointsRequest { public boolean withPath = false; @JsonProperty("with_vertex") public boolean withVertex = false; + @JsonProperty("with_edge") + public boolean withEdge = false; @Override public String toString() { return String.format("CrosspointsRequest{sourceVertex=%s," + "pathPatterns=%s,withPath=%s,withVertex=%s," + - "capacity=%s,limit=%s}", this.sources, - this.pathPatterns, this.withPath, - this.withVertex, this.capacity, this.limit); + "capacity=%s,limit=%s,withEdge=%s}", this.sources, + this.pathPatterns, this.withPath, this.withVertex, + this.capacity, this.limit, this.withEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedPathsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedPathsAPI.java index 272009ea24..5641e31193 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedPathsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/CustomizedPathsAPI.java @@ -30,33 +30,33 @@ import java.util.Map; import java.util.Set; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.CustomizePathsTraverser; import 
org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.steps.WeightedEdgeStep; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/customizedpaths") @Singleton @Tag(name = "CustomizedPathsAPI") @@ -64,6 +64,16 @@ public class CustomizedPathsAPI extends API { private static final Logger LOG = Log.logger(CustomizedPathsAPI.class); + private static List step(HugeGraph graph, + PathRequest request) { + int stepSize = request.steps.size(); + List steps = new ArrayList<>(stepSize); + for (Step step : request.steps) { + steps.add(step.jsonToStep(graph)); + } + return steps; + } + @POST @Timed @Consumes(APPLICATION_JSON) @@ -81,10 +91,12 @@ public String post(@Context GraphManager manager, } LOG.debug("Graph [{}] get customized paths from source vertex '{}', " + - "with steps '{}', sort by '{}', capacity '{}', limit '{}' " + - "and with_vertex '{}'", graph, request.sources, request.steps, + "with steps '{}', sort by '{}', capacity '{}', limit '{}', " + + "with_vertex '{}' and with_edge '{}'", graph, request.sources, request.steps, request.sortBy, request.capacity, request.limit, - request.withVertex); + request.withVertex, request.withEdge); + + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Iterator sources = request.sources.vertices(g); 
@@ -95,6 +107,8 @@ public String post(@Context GraphManager manager, List paths; paths = traverser.customizedPaths(sources, steps, sorted, request.capacity, request.limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); if (sorted) { boolean incr = request.sortBy == SortBy.INCR; @@ -102,29 +116,35 @@ public String post(@Context GraphManager manager, request.limit); } - if (!request.withVertex) { - return manager.serializer(g).writePaths("paths", paths, false); + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); } - - Set ids = new HashSet<>(); - for (HugeTraverser.Path p : paths) { - ids.addAll(p.vertices()); + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); } - Iterator iter = QueryResults.emptyIterator(); - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + + Iterator iterEdge; + Set edges = traverser.edgeResults().getEdges(paths); + if (request.withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } - return manager.serializer(g).writePaths("paths", paths, false, iter); + + return manager.serializer(g, measure.measures()) + .writePaths("paths", paths, false, + iterVertex, iterEdge); } - private static List step(HugeGraph graph, - PathRequest req) { - int stepSize = req.steps.size(); - List steps = new ArrayList<>(stepSize); - for (Step step : req.steps) { - steps.add(step.jsonToStep(graph)); - } - return steps; + private enum SortBy { + INCR, + DECR, + NONE } private static class PathRequest { @@ -142,13 +162,16 @@ private static class PathRequest { @JsonProperty("with_vertex") public boolean withVertex = false; + @JsonProperty("with_edge") + public boolean withEdge = false; + @Override public 
String toString() { return String.format("PathRequest{sourceVertex=%s,steps=%s," + "sortBy=%s,capacity=%s,limit=%s," + - "withVertex=%s}", this.sources, this.steps, + "withVertex=%s,withEdge=%s}", this.sources, this.steps, this.sortBy, this.capacity, this.limit, - this.withVertex); + this.withVertex, this.withEdge); } } @@ -190,10 +213,4 @@ private WeightedEdgeStep jsonToStep(HugeGraph g) { this.defaultWeight, this.sample); } } - - private enum SortBy { - INCR, - DECR, - NONE - } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/EdgesAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/EdgesAPI.java index ca4909a552..da9dfe1779 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/EdgesAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/EdgesAPI.java @@ -22,32 +22,32 @@ import java.util.Iterator; import java.util.List; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.filter.CompressInterceptor.Compress; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.store.Shard; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; 
+import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/edges") @Singleton @Tag(name = "EdgesAPI") diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/FusiformSimilarityAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/FusiformSimilarityAPI.java index fbb330ae12..1b2273dc4a 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/FusiformSimilarityAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/FusiformSimilarityAPI.java @@ -23,32 +23,33 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; import java.util.Iterator; - -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; -import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.FusiformSimilarityTraverser; import org.apache.hugegraph.traversal.algorithm.FusiformSimilarityTraverser.SimilarsMap; import 
org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/fusiformsimilarity") @Singleton @Tag(name = "FusiformSimilarityAPI") @@ -64,7 +65,7 @@ public String post(@Context GraphManager manager, @PathParam("graph") String graph, FusiformSimilarityRequest request) { E.checkArgumentNotNull(request, "The fusiform similarity " + - "request body can't be null"); + "request body can't be null"); E.checkArgumentNotNull(request.sources, "The sources of fusiform similarity " + "request can't be null"); @@ -94,28 +95,37 @@ public String post(@Context GraphManager manager, request.minNeighbors, request.alpha, request.minSimilars, request.groupProperty, request.minGroups); + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Iterator sources = request.sources.vertices(g); E.checkArgument(sources != null && sources.hasNext(), "The source vertices can't be empty"); - FusiformSimilarityTraverser traverser = - new FusiformSimilarityTraverser(g); + FusiformSimilarityTraverser traverser = new FusiformSimilarityTraverser(g); SimilarsMap result = traverser.fusiformSimilarity( - sources, request.direction, request.label, - request.minNeighbors, request.alpha, - request.minSimilars, request.top, - request.groupProperty, request.minGroups, - request.maxDegree, request.capacity, - request.limit, request.withIntermediary); + sources, request.direction, 
request.label, + request.minNeighbors, request.alpha, + request.minSimilars, request.top, + request.groupProperty, request.minGroups, + request.maxDegree, request.capacity, + request.limit, request.withIntermediary); CloseableIterator.closeIterator(sources); - Iterator iterator = QueryResults.emptyIterator(); - if (request.withVertex && !result.isEmpty()) { - iterator = g.vertices(result.vertices().toArray()); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set vertexIds = result.vertices(); + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0); + } else { + iterVertex = vertexIds.iterator(); } - return manager.serializer(g).writeSimilars(result, iterator); + + return manager.serializer(g, measure.measures()) + .writeSimilars(result, iterVertex); } private static class FusiformSimilarityRequest { diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityAPI.java index ff187a5918..d5de80351f 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityAPI.java @@ -18,41 +18,40 @@ package org.apache.hugegraph.api.traversers; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_CAPACITY; -import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_LIMIT; +import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import java.util.Map; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import 
jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.algorithm.JaccardSimilarTraverser; +import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; -import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/jaccardsimilarity") @Singleton @Tag(name = "JaccardSimilarityAPI") @@ -75,6 +74,8 @@ public String get(@Context GraphManager manager, "with direction {}, edge label {} and max degree '{}'", graph, vertex, other, direction, edgeLabel, maxDegree); + ApiMeasurer measure = new ApiMeasurer(); + Id sourceId = VertexAPI.checkAndParseVertexId(vertex); Id targetId = VertexAPI.checkAndParseVertexId(other); Directions dir = 
Directions.convert(EdgeAPI.parseDirection(direction)); @@ -82,12 +83,15 @@ public String get(@Context GraphManager manager, HugeGraph g = graph(manager, graph); double similarity; try (JaccardSimilarTraverser traverser = - new JaccardSimilarTraverser(g)) { + new JaccardSimilarTraverser(g)) { similarity = traverser.jaccardSimilarity(sourceId, targetId, dir, edgeLabel, maxDegree); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); } - return JsonUtil.toJson(ImmutableMap.of("jaccard_similarity", - similarity)); + + return manager.serializer(g, measure.measures()) + .writeMap(ImmutableMap.of("jaccard_similarity", similarity)); } @POST @@ -110,6 +114,8 @@ public String post(@Context GraphManager manager, graph, request.vertex, request.step, request.top, request.capacity); + ApiMeasurer measure = new ApiMeasurer(); + HugeGraph g = graph(manager, graph); Id sourceId = HugeVertex.getIdValue(request.vertex); @@ -117,11 +123,14 @@ public String post(@Context GraphManager manager, Map results; try (JaccardSimilarTraverser traverser = - new JaccardSimilarTraverser(g)) { + new JaccardSimilarTraverser(g)) { results = traverser.jaccardSimilars(sourceId, step, request.top, request.capacity); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); } - return manager.serializer(g).writeMap(results); + return manager.serializer(g, measure.measures()) + .writeMap(ImmutableMap.of("jaccard_similarity", results)); } private static class Request { diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java index 4a7c0a9515..a0e7d0c4ee 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java @@ -21,6 
+21,7 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; +import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -40,12 +41,13 @@ import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; -import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.Edge; import org.slf4j.Logger; import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import io.swagger.v3.oas.annotations.tags.Tag; import jakarta.inject.Singleton; @@ -75,6 +77,8 @@ public String get(@Context GraphManager manager, @QueryParam("direction") String direction, @QueryParam("label") String edgeLabel, @QueryParam("max_depth") int depth, + @QueryParam("count_only") + @DefaultValue("false") boolean countOnly, @QueryParam("max_degree") @DefaultValue(DEFAULT_MAX_DEGREE) long maxDegree, @QueryParam("limit") @@ -85,6 +89,8 @@ public String get(@Context GraphManager manager, graph, sourceV, direction, edgeLabel, depth, maxDegree, limit); + ApiMeasurer measure = new ApiMeasurer(); + Id source = VertexAPI.checkAndParseVertexId(sourceV); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -94,8 +100,14 @@ public String get(@Context GraphManager manager, try (KneighborTraverser traverser = new KneighborTraverser(g)) { ids = traverser.kneighbor(source, dir, edgeLabel, depth, maxDegree, limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + } + if (countOnly) { + return manager.serializer(g, measure.measures()) + .writeMap(ImmutableMap.of("vertices_size", ids.size())); } - return manager.serializer(g).writeList("vertices", ids); + 
return manager.serializer(g, measure.measures()).writeList("vertices", ids); } @POST @@ -111,15 +123,18 @@ public String post(@Context GraphManager manager, E.checkArgument(request.step != null, "The steps of request can't be null"); if (request.countOnly) { - E.checkArgument(!request.withVertex && !request.withPath, - "Can't return vertex or path when count only"); + E.checkArgument(!request.withVertex && !request.withPath && !request.withEdge, + "Can't return vertex, edge or path when count only"); } LOG.debug("Graph [{}] get customized kneighbor from source vertex " + "'{}', with step '{}', limit '{}', count_only '{}', " + - "with_vertex '{}' and with_path '{}'", + "with_vertex '{}', with_path '{}' and with_edge '{}'", graph, request.source, request.step, request.limit, - request.countOnly, request.withVertex, request.withPath); + request.countOnly, request.withVertex, request.withPath, + request.withEdge); + + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Id sourceId = HugeVertex.getIdValue(request.source); @@ -131,6 +146,8 @@ public String post(@Context GraphManager manager, results = traverser.customizedKneighbor(sourceId, step, request.maxDepth, request.limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); } long size = results.size(); @@ -144,20 +161,41 @@ public String post(@Context GraphManager manager, if (request.withPath) { paths.addAll(results.paths(request.limit)); } - Iterator iter = QueryResults.emptyIterator(); - if (request.withVertex && !request.countOnly) { - Set ids = new HashSet<>(neighbors); - if (request.withPath) { - for (HugeTraverser.Path p : paths) { - ids.addAll(p.vertices()); - } + + if (request.countOnly) { + return manager.serializer(g, measure.measures()) + .writeNodesWithPath("kneighbor", neighbors, size, paths, + QueryResults.emptyIterator(), + QueryResults.emptyIterator()); + } + + Iterator iterVertex; + Set vertexIds = new HashSet<>(neighbors); + if 
(request.withPath) { + for (HugeTraverser.Path p : paths) { + vertexIds.addAll(p.vertices()); } - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + } + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge = Collections.emptyIterator(); + if (request.withPath) { + Set edges = results.edgeResults().getEdges(paths); + if (request.withEdge) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } } - return manager.serializer(g).writeNodesWithPath("kneighbor", neighbors, - size, paths, iter); + + return manager.serializer(g, measure.measures()) + .writeNodesWithPath("kneighbor", neighbors, + size, paths, iterVertex, iterEdge); } private static class Request { @@ -176,14 +214,16 @@ private static class Request { public boolean withVertex = false; @JsonProperty("with_path") public boolean withPath = false; + @JsonProperty("with_edge") + public boolean withEdge = false; @Override public String toString() { return String.format("PathRequest{source=%s,step=%s,maxDepth=%s" + "limit=%s,countOnly=%s,withVertex=%s," + - "withPath=%s}", this.source, this.step, + "withPath=%s,withEdge=%s}", this.source, this.step, this.maxDepth, this.limit, this.countOnly, - this.withVertex, this.withPath); + this.withVertex, this.withPath, this.withEdge); } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java index 30282be9d6..1adf2be5eb 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java @@ -22,6 +22,7 @@ import static 
org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; +import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -41,12 +42,13 @@ import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; -import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.Edge; import org.slf4j.Logger; import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import io.swagger.v3.oas.annotations.tags.Tag; import jakarta.inject.Singleton; @@ -78,6 +80,8 @@ public String get(@Context GraphManager manager, @QueryParam("max_depth") int depth, @QueryParam("nearest") @DefaultValue("true") boolean nearest, + @QueryParam("count_only") + @DefaultValue("false") boolean count_only, @QueryParam("max_degree") @DefaultValue(DEFAULT_MAX_DEGREE) long maxDegree, @QueryParam("capacity") @@ -87,8 +91,10 @@ public String get(@Context GraphManager manager, LOG.debug("Graph [{}] get k-out from '{}' with " + "direction '{}', edge label '{}', max depth '{}', nearest " + "'{}', max degree '{}', capacity '{}' and limit '{}'", - graph, source, direction, edgeLabel, depth, nearest, - maxDegree, capacity, limit); + graph, source, direction, edgeLabel, depth, + nearest, maxDegree, capacity, limit); + + ApiMeasurer measure = new ApiMeasurer(); Id sourceId = VertexAPI.checkAndParseVertexId(source); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -99,8 +105,15 @@ public String get(@Context GraphManager manager, try (KoutTraverser traverser = new KoutTraverser(g)) { ids = traverser.kout(sourceId, dir, edgeLabel, depth, nearest, maxDegree, capacity, limit); + 
measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + } + + if (count_only) { + return manager.serializer(g, measure.measures()) + .writeMap(ImmutableMap.of("vertices_size", ids.size())); } - return manager.serializer(g).writeList("vertices", ids); + return manager.serializer(g, measure.measures()).writeList("vertices", ids); } @POST @@ -116,23 +129,25 @@ public String post(@Context GraphManager manager, E.checkArgument(request.step != null, "The steps of request can't be null"); if (request.countOnly) { - E.checkArgument(!request.withVertex && !request.withPath, - "Can't return vertex or path when count only"); + E.checkArgument(!request.withVertex && !request.withPath && !request.withEdge, + "Can't return vertex, edge or path when count only"); } LOG.debug("Graph [{}] get customized kout from source vertex '{}', " + "with step '{}', max_depth '{}', nearest '{}', " + "count_only '{}', capacity '{}', limit '{}', " + - "with_vertex '{}' and with_path '{}'", + "with_vertex '{}', with_path '{}' and with_edge '{}'", graph, request.source, request.step, request.maxDepth, request.nearest, request.countOnly, request.capacity, - request.limit, request.withVertex, request.withPath); + request.limit, request.withVertex, request.withPath, + request.withEdge); + + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Id sourceId = HugeVertex.getIdValue(request.source); EdgeStep step = step(g, request.step); - KoutRecords results; try (KoutTraverser traverser = new KoutTraverser(g)) { results = traverser.customizedKout(sourceId, step, @@ -140,8 +155,9 @@ public String post(@Context GraphManager manager, request.nearest, request.capacity, request.limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); } - long size = results.size(); if (request.limit != NO_LIMIT && size > request.limit) { size = request.limit; @@ -154,20 +170,40 @@ public String post(@Context 
GraphManager manager, paths.addAll(results.paths(request.limit)); } - Iterator iter = QueryResults.emptyIterator(); - if (request.withVertex && !request.countOnly) { - Set ids = new HashSet<>(neighbors); - if (request.withPath) { - for (HugeTraverser.Path p : paths) { - ids.addAll(p.vertices()); - } + if (request.countOnly) { + return manager.serializer(g, measure.measures()) + .writeNodesWithPath("kneighbor", neighbors, size, paths, + QueryResults.emptyIterator(), + QueryResults.emptyIterator()); + } + + Iterator iterVertex; + Set vertexIds = new HashSet<>(neighbors); + if (request.withPath) { + for (HugeTraverser.Path p : results.paths(request.limit)) { + vertexIds.addAll(p.vertices()); } - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + } + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge = Collections.emptyIterator(); + if (request.withPath) { + Set edges = results.edgeResults().getEdges(paths); + if (request.withEdge) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } } - return manager.serializer(g).writeNodesWithPath("kout", neighbors, - size, paths, iter); + + return manager.serializer(g, measure.measures()) + .writeNodesWithPath("kout", neighbors, size, paths, + iterVertex, iterEdge); } private static class Request { @@ -190,15 +226,18 @@ private static class Request { public boolean withVertex = false; @JsonProperty("with_path") public boolean withPath = false; + @JsonProperty("with_edge") + public boolean withEdge = false; @Override public String toString() { return String.format("KoutRequest{source=%s,step=%s,maxDepth=%s" + "nearest=%s,countOnly=%s,capacity=%s," + - "limit=%s,withVertex=%s,withPath=%s}", - this.source, this.step, this.maxDepth, - this.nearest, this.countOnly, this.capacity, - this.limit, 
this.withVertex, this.withPath); + "limit=%s,withVertex=%s,withPath=%s," + + "withEdge=%s}", this.source, this.step, + this.maxDepth, this.nearest, this.countOnly, + this.capacity, this.limit, this.withVertex, + this.withPath, this.withEdge); } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/MultiNodeShortestPathAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/MultiNodeShortestPathAPI.java index 81c38e65c9..588940abb7 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/MultiNodeShortestPathAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/MultiNodeShortestPathAPI.java @@ -24,30 +24,30 @@ import java.util.List; import java.util.Set; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.QueryResults; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.MultiNodeShortestPathTraverser; +import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; 
+import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/multinodeshortestpath") @Singleton @Tag(name = "MultiNodeShortestPathAPI") @@ -74,32 +74,48 @@ public String post(@Context GraphManager manager, graph, request.vertices, request.step, request.maxDepth, request.capacity, request.withVertex); + ApiMeasurer measure = new ApiMeasurer(); + HugeGraph g = graph(manager, graph); Iterator vertices = request.vertices.vertices(g); EdgeStep step = step(g, request.step); - List paths; + MultiNodeShortestPathTraverser.WrappedListPath wrappedListPath; try (MultiNodeShortestPathTraverser traverser = - new MultiNodeShortestPathTraverser(g)) { - paths = traverser.multiNodeShortestPath(vertices, step, - request.maxDepth, - request.capacity); + new MultiNodeShortestPathTraverser(g)) { + wrappedListPath = traverser.multiNodeShortestPath(vertices, step, + request.maxDepth, + request.capacity); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); } - if (!request.withVertex) { - return manager.serializer(g).writePaths("paths", paths, false); - } + List paths = wrappedListPath.paths(); - Set ids = new HashSet<>(); - for (HugeTraverser.Path p : paths) { - ids.addAll(p.vertices()); + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); + } + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); } - Iterator iter = QueryResults.emptyIterator(); - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + + Iterator iterEdge; + Set edges = wrappedListPath.edges(); + if 
(request.withEdge && !edges.isEmpty()) { + iterEdge = wrappedListPath.edges().iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } - return manager.serializer(g).writePaths("paths", paths, false, iter); + + return manager.serializer(g, measure.measures()) + .writePaths("paths", paths, + false, iterVertex, iterEdge); } private static class Request { @@ -114,13 +130,15 @@ private static class Request { public long capacity = Long.parseLong(DEFAULT_CAPACITY); @JsonProperty("with_vertex") public boolean withVertex = false; + @JsonProperty("with_edge") + public boolean withEdge = false; @Override public String toString() { return String.format("Request{vertices=%s,step=%s,maxDepth=%s" + - "capacity=%s,withVertex=%s}", + "capacity=%s,withVertex=%s,withEdge=%s}", this.vertices, this.step, this.maxDepth, - this.capacity, this.withVertex); + this.capacity, this.withVertex, this.withEdge); } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/PathsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/PathsAPI.java index 6e18c9a1c2..50bca7f75b 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/PathsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/PathsAPI.java @@ -27,27 +27,11 @@ import java.util.Iterator; import java.util.Set; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import 
org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.CollectionPathsTraverser; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.PathsTraverser; @@ -55,9 +39,25 @@ import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/paths") @Singleton @Tag(name = "PathsAPI") @@ -87,6 +87,8 @@ public String get(@Context GraphManager manager, graph, source, target, direction, edgeLabel, depth, maxDegree, capacity, limit); + ApiMeasurer measure = new ApiMeasurer(); + Id sourceId = VertexAPI.checkAndParseVertexId(source); Id targetId = VertexAPI.checkAndParseVertexId(target); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -97,7 +99,10 @@ public String get(@Context GraphManager manager, dir.opposite(), edgeLabel, depth, maxDegree, capacity, limit); - return manager.serializer(g).writePaths("paths", paths, false); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + return manager.serializer(g, measure.measures()) + 
.writePaths("paths", paths, false); } @POST @@ -120,10 +125,12 @@ public String post(@Context GraphManager manager, LOG.debug("Graph [{}] get paths from source vertices '{}', target " + "vertices '{}', with step '{}', max depth '{}', " + - "capacity '{}', limit '{}' and with_vertex '{}'", + "capacity '{}', limit '{}', with_vertex '{}' and with_edge '{}'", graph, request.sources, request.targets, request.step, request.depth, request.capacity, request.limit, - request.withVertex); + request.withVertex, request.withEdge); + + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Iterator sources = request.sources.vertices(g); @@ -131,24 +138,38 @@ public String post(@Context GraphManager manager, EdgeStep step = step(g, request.step); CollectionPathsTraverser traverser = new CollectionPathsTraverser(g); - Collection paths; - paths = traverser.paths(sources, targets, step, request.depth, - request.nearest, request.capacity, - request.limit); - - if (!request.withVertex) { - return manager.serializer(g).writePaths("paths", paths, false); + CollectionPathsTraverser.WrappedPathCollection + wrappedPathCollection = traverser.paths(sources, targets, + step, request.depth, + request.nearest, request.capacity, + request.limit); + Collection paths = wrappedPathCollection.paths(); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); } - - Set ids = new HashSet<>(); - for (HugeTraverser.Path p : paths) { - ids.addAll(p.vertices()); + if (request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); } - Iterator iter = QueryResults.emptyIterator(); - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + + Iterator iterEdge; + Set edges = 
wrappedPathCollection.edges(); + if (request.withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } - return manager.serializer(g).writePaths("paths", paths, false, iter); + + return manager.serializer(g, measure.measures()) + .writePaths("paths", paths, false, + iterVertex, iterEdge); } private static class Request { @@ -170,14 +191,17 @@ private static class Request { @JsonProperty("with_vertex") public boolean withVertex = false; + @JsonProperty("with_edge") + public boolean withEdge = false; + @Override public String toString() { return String.format("PathRequest{sources=%s,targets=%s,step=%s," + "maxDepth=%s,nearest=%s,capacity=%s," + - "limit=%s,withVertex=%s}", this.sources, - this.targets, this.step, this.depth, - this.nearest, this.capacity, - this.limit, this.withVertex); + "limit=%s,withVertex=%s,withEdge=%s}", + this.sources, this.targets, this.step, + this.depth, this.nearest, this.capacity, + this.limit, this.withVertex, this.withEdge); } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RaysAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RaysAPI.java index c841412cae..28ded20e60 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RaysAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RaysAPI.java @@ -21,30 +21,35 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import 
jakarta.ws.rs.core.Context; - -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.SubGraphTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/rays") @Singleton @Tag(name = "RaysAPI") @@ -66,12 +71,17 @@ public String get(@Context GraphManager manager, @QueryParam("capacity") @DefaultValue(DEFAULT_CAPACITY) long capacity, @QueryParam("limit") - @DefaultValue(DEFAULT_PATHS_LIMIT) int limit) { + @DefaultValue(DEFAULT_PATHS_LIMIT) int limit, + @QueryParam("with_vertex") + @DefaultValue("false") boolean withVertex, + @QueryParam("with_edge") + @DefaultValue("false") boolean withEdge) { LOG.debug("Graph [{}] get rays paths from '{}' with " + "direction '{}', edge label '{}', max depth '{}', " + "max degree '{}', capacity '{}' and limit '{}'", graph, sourceV, direction, edgeLabel, depth, maxDegree, capacity, limit); + ApiMeasurer measure = new ApiMeasurer(); Id source = VertexAPI.checkAndParseVertexId(sourceV); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -80,8 +90,33 
@@ public String get(@Context GraphManager manager, SubGraphTraverser traverser = new SubGraphTraverser(g); HugeTraverser.PathSet paths = traverser.rays(source, dir, edgeLabel, - depth, maxDegree, - capacity, limit); - return manager.serializer(g).writePaths("rays", paths, false); + depth, maxDegree, capacity, + limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); + } + if (withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge; + Set edges = paths.getEdges(); + if (withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); + } + + return manager.serializer(g, measure.measures()) + .writePaths("rays", paths, false, + iterVertex, iterEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RingsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RingsAPI.java index 67dfe7ab72..3a44fd85a1 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RingsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/RingsAPI.java @@ -21,30 +21,35 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import 
jakarta.ws.rs.core.Context; - -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.SubGraphTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/rings") @Singleton @Tag(name = "RingsAPI") @@ -68,14 +73,19 @@ public String get(@Context GraphManager manager, @QueryParam("capacity") @DefaultValue(DEFAULT_CAPACITY) long capacity, @QueryParam("limit") - @DefaultValue(DEFAULT_PATHS_LIMIT) int limit) { + @DefaultValue(DEFAULT_PATHS_LIMIT) int limit, + @QueryParam("with_vertex") + @DefaultValue("false") boolean withVertex, + @QueryParam("with_edge") + @DefaultValue("false") boolean withEdge) { LOG.debug("Graph [{}] get rings paths reachable from '{}' with " + "direction '{}', edge label '{}', max depth '{}', " + - "source in ring '{}', max degree '{}', capacity '{}' " + - "and limit '{}'", + "source in ring '{}', max degree '{}', capacity '{}', " + + "limit '{}', with_vertex '{}' and with_edge '{}'", graph, sourceV, direction, edgeLabel, depth, sourceInRing, - maxDegree, capacity, limit); + maxDegree, 
capacity, limit, withVertex, withEdge); + ApiMeasurer measure = new ApiMeasurer(); Id source = VertexAPI.checkAndParseVertexId(sourceV); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -83,8 +93,34 @@ public String get(@Context GraphManager manager, SubGraphTraverser traverser = new SubGraphTraverser(g); HugeTraverser.PathSet paths = traverser.rings(source, dir, edgeLabel, - depth, sourceInRing, - maxDegree, capacity, limit); - return manager.serializer(g).writePaths("rings", paths, false); + depth, sourceInRing, maxDegree, + capacity, limit); + + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); + } + if (withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge; + Set edges = paths.getEdges(); + if (withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); + } + + return manager.serializer(g, measure.measures()) + .writePaths("rings", paths, false, + iterVertex, iterEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SameNeighborsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SameNeighborsAPI.java index a7a1770fd0..489ca08054 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SameNeighborsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SameNeighborsAPI.java @@ -20,30 +20,39 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_ELEMENTS_LIMIT; import static 
org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; import java.util.Set; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.core.GraphManager; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.SameNeighborTraverser; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableMap; + +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; @Path("graphs/{graph}/traversers/sameneighbors") @Singleton @@ -69,6 +78,8 @@ public String get(@Context GraphManager manager, "direction {}, edge label {}, max degree '{}' and limit '{}'", graph, vertex, other, direction, edgeLabel, maxDegree, limit); + ApiMeasurer measure = new ApiMeasurer(); + Id sourceId = VertexAPI.checkAndParseVertexId(vertex); Id targetId = VertexAPI.checkAndParseVertexId(other); Directions dir = 
Directions.convert(EdgeAPI.parseDirection(direction)); @@ -77,6 +88,77 @@ public String get(@Context GraphManager manager, SameNeighborTraverser traverser = new SameNeighborTraverser(g); Set neighbors = traverser.sameNeighbors(sourceId, targetId, dir, edgeLabel, maxDegree, limit); - return manager.serializer(g).writeList("same_neighbors", neighbors); + + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + return manager.serializer(g, measure.measures()) + .writeList("same_neighbors", neighbors); + } + + @POST + @Timed + @Produces(APPLICATION_JSON_WITH_CHARSET) + public String sameNeighbors(@Context GraphManager manager, + @PathParam("graph") String graph, + Request request) { + LOG.debug("Graph [{}] get same neighbors among batch, '{}'", graph, request.toString()); + + ApiMeasurer measure = new ApiMeasurer(); + + Directions dir = Directions.convert(EdgeAPI.parseDirection(request.direction)); + HugeGraph g = graph(manager, graph); + SameNeighborTraverser traverser = new SameNeighborTraverser(g); + + List vertexList = request.vertexList; + E.checkArgument(vertexList.size() >= 2, "vertex_list size can't " + + "be less than 2"); + + List vertexIds = new ArrayList<>(); + for (Object obj : vertexList) { + vertexIds.add(HugeVertex.getIdValue(obj)); + } + + Set neighbors = traverser.sameNeighbors(vertexIds, dir, request.labels, + request.maxDegree, request.limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set ids = new HashSet<>(neighbors); + ids.addAll(vertexIds); + if (request.withVertex && !ids.isEmpty()) { + iterVertex = g.vertices(ids.toArray()); + } else { + iterVertex = ids.iterator(); + } + return manager.serializer(g, measure.measures()) + .writeMap(ImmutableMap.of("same_neighbors", neighbors, + "vertices", iterVertex)); + } + + private static class Request { + + @JsonProperty("max_degree") + public long maxDegree = 
Long.parseLong(DEFAULT_MAX_DEGREE); + @JsonProperty("limit") + public int limit = Integer.parseInt(DEFAULT_ELEMENTS_LIMIT); + @JsonProperty("vertex_list") + private List vertexList; + @JsonProperty("direction") + private String direction; + @JsonProperty("labels") + private List labels; + @JsonProperty("with_vertex") + private boolean withVertex = false; + + @Override + public String toString() { + return String.format("SameNeighborsBatchRequest{vertex_list=%s," + + "direction=%s,label=%s,max_degree=%d," + + "limit=%d,with_vertex=%s", + this.vertexList, this.direction, this.labels, + this.maxDegree, this.limit, this.withVertex); + } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/ShortestPathAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/ShortestPathAPI.java index 08cbdf74cb..dcc8489ae1 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/ShortestPathAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/ShortestPathAPI.java @@ -20,32 +20,36 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_CAPACITY; import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; +import java.util.Iterator; import java.util.List; - -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; +import 
org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.ShortestPathTraverser; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; @Path("graphs/{graph}/traversers/shortestpath") @Singleton @@ -68,13 +72,21 @@ public String get(@Context GraphManager manager, @DefaultValue(DEFAULT_MAX_DEGREE) long maxDegree, @QueryParam("skip_degree") @DefaultValue("0") long skipDegree, + @QueryParam("with_vertex") + @DefaultValue("false") boolean withVertex, + @QueryParam("with_edge") + @DefaultValue("false") boolean withEdge, @QueryParam("capacity") @DefaultValue(DEFAULT_CAPACITY) long capacity) { LOG.debug("Graph [{}] get shortest path from '{}', to '{}' with " + "direction {}, edge label {}, max depth '{}', " + - "max degree '{}', skipped maxDegree '{}' and capacity '{}'", + "max degree '{}', skipped maxDegree '{}', capacity '{}', " + + "with_vertex '{}' and with_edge '{}'", graph, source, target, direction, edgeLabel, depth, - maxDegree, skipDegree, capacity); + maxDegree, skipDegree, capacity, withVertex, withEdge); + + ApiMeasurer measure = new ApiMeasurer(); + Id sourceId = VertexAPI.checkAndParseVertexId(source); Id targetId = VertexAPI.checkAndParseVertexId(target); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -89,6 +101,29 @@ public String get(@Context GraphManager manager, dir, 
edgeLabels, depth, maxDegree, skipDegree, capacity); - return manager.serializer(g).writeList("path", path.vertices()); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + List vertexIds = path.vertices(); + if (withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(path.vertices().size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge; + Set edges = path.getEdges(); + if (withEdge) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); + } + + return manager.serializer(g, measure.measures()) + .writeMap(ImmutableMap.of("path", path.vertices(), + "vertices", iterVertex, + "edges", iterEdge)); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SingleSourceShortestPathAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SingleSourceShortestPathAPI.java index 8813399ca7..eab339d958 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SingleSourceShortestPathAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/SingleSourceShortestPathAPI.java @@ -22,33 +22,33 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_PATHS_LIMIT; import java.util.Iterator; - -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import 
org.apache.hugegraph.api.API; import org.apache.hugegraph.api.graph.EdgeAPI; import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.core.GraphManager; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser; -import org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser.WeightedPaths; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/singlesourceshortestpath") @Singleton @Tag(name = "SingleSourceShortestPathAPI") @@ -69,16 +69,22 @@ public String get(@Context GraphManager manager, @DefaultValue(DEFAULT_MAX_DEGREE) long maxDegree, @QueryParam("skip_degree") @DefaultValue("0") long skipDegree, + @QueryParam("with_vertex") + @DefaultValue("false") boolean withVertex, + @QueryParam("with_edge") + @DefaultValue("false") boolean withEdge, @QueryParam("capacity") @DefaultValue(DEFAULT_CAPACITY) long capacity, @QueryParam("limit") - @DefaultValue(DEFAULT_PATHS_LIMIT) int limit, - @QueryParam("with_vertex") boolean withVertex) { + @DefaultValue(DEFAULT_PATHS_LIMIT) int limit) { LOG.debug("Graph [{}] get single source shortest path from '{}' " + "with direction {}, edge label {}, weight property {}, " + - "max degree '{}', limit '{}' and with vertex '{}'", + "max degree '{}', capacity '{}', limit '{}', " + + "with_vertex '{}' and with_edge '{}'", 
graph, source, direction, edgeLabel, - weight, maxDegree, withVertex); + weight, maxDegree, capacity, limit, withVertex, withEdge); + + ApiMeasurer measure = new ApiMeasurer(); Id sourceId = VertexAPI.checkAndParseVertexId(source); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -86,14 +92,31 @@ public String get(@Context GraphManager manager, HugeGraph g = graph(manager, graph); SingleSourceShortestPathTraverser traverser = new SingleSourceShortestPathTraverser(g); - WeightedPaths paths = traverser.singleSourceShortestPaths( - sourceId, dir, edgeLabel, weight, - maxDegree, skipDegree, capacity, limit); - Iterator iterator = QueryResults.emptyIterator(); - assert paths != null; - if (!paths.isEmpty() && withVertex) { - iterator = g.vertices(paths.vertices().toArray()); + SingleSourceShortestPathTraverser.WeightedPaths paths = + traverser.singleSourceShortestPaths( + sourceId, dir, edgeLabel, weight, + maxDegree, skipDegree, capacity, limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + Iterator iterVertex; + Set vertexIds = paths.vertices(); + if (withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); } - return manager.serializer(g).writeWeightedPaths(paths, iterator); + + Iterator iterEdge; + Set edges = paths.getEdges(); + if (withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); + } + + return manager.serializer(g, measure.measures()) + .writeWeightedPaths(paths, iterVertex, iterEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TemplatePathsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TemplatePathsAPI.java index d566fae90d..9b3739acb2 100644 --- 
a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TemplatePathsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TemplatePathsAPI.java @@ -26,30 +26,30 @@ import java.util.List; import java.util.Set; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.TemplatePathsTraverser; import org.apache.hugegraph.traversal.algorithm.steps.RepeatEdgeStep; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.annotation.JsonProperty; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/templatepaths") @Singleton @Tag(name = "TemplatePathsAPI") @@ -57,6 +57,22 @@ public class TemplatePathsAPI extends TraverserAPI { private static final Logger LOG = Log.logger(TemplatePathsAPI.class); + private static List steps(HugeGraph g, + List steps) { + List edgeSteps = new ArrayList<>(steps.size()); 
+ for (TemplatePathStep step : steps) { + edgeSteps.add(repeatEdgeStep(g, step)); + } + return edgeSteps; + } + + private static RepeatEdgeStep repeatEdgeStep(HugeGraph graph, + TemplatePathStep step) { + return new RepeatEdgeStep(graph, step.direction, step.labels, + step.properties, step.maxDegree, + step.skipDegree, step.maxTimes); + } + @POST @Timed @Consumes(APPLICATION_JSON) @@ -74,9 +90,11 @@ public String post(@Context GraphManager manager, LOG.debug("Graph [{}] get template paths from source vertices '{}', " + "target vertices '{}', with steps '{}', " + - "capacity '{}', limit '{}' and with_vertex '{}'", + "capacity '{}', limit '{}', with_vertex '{}' and with_edge '{}'", graph, request.sources, request.targets, request.steps, - request.capacity, request.limit, request.withVertex); + request.capacity, request.limit, request.withVertex, request.withEdge); + + ApiMeasurer measure = new ApiMeasurer(); HugeGraph g = graph(manager, graph); Iterator sources = request.sources.vertices(g); @@ -84,40 +102,38 @@ public String post(@Context GraphManager manager, List steps = steps(g, request.steps); TemplatePathsTraverser traverser = new TemplatePathsTraverser(g); - Set paths; - paths = traverser.templatePaths(sources, targets, steps, + TemplatePathsTraverser.WrappedPathSet wrappedPathSet = + traverser.templatePaths(sources, targets, steps, request.withRing, request.capacity, request.limit); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); - if (!request.withVertex) { - return manager.serializer(g).writePaths("paths", paths, false); - } + Set paths = wrappedPathSet.paths(); - Set ids = new HashSet<>(); - for (HugeTraverser.Path p : paths) { - ids.addAll(p.vertices()); + Iterator iterVertex; + Set vertexIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + vertexIds.addAll(path.vertices()); } - Iterator iter = QueryResults.emptyIterator(); - if (!ids.isEmpty()) { - iter = g.vertices(ids.toArray()); + if 
(request.withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); } - return manager.serializer(g).writePaths("paths", paths, false, iter); - } - private static List steps(HugeGraph g, - List steps) { - List edgeSteps = new ArrayList<>(steps.size()); - for (TemplatePathStep step : steps) { - edgeSteps.add(repeatEdgeStep(g, step)); + Iterator iterEdge; + Set edges = wrappedPathSet.edges(); + if (request.withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } - return edgeSteps; - } - private static RepeatEdgeStep repeatEdgeStep(HugeGraph graph, - TemplatePathStep step) { - return new RepeatEdgeStep(graph, step.direction, step.labels, - step.properties, step.maxDegree, - step.skipDegree, step.maxTimes); + return manager.serializer(g, measure.measures()) + .writePaths("paths", paths, false, + iterVertex, iterEdge); } private static class Request { @@ -136,15 +152,17 @@ private static class Request { public int limit = Integer.parseInt(DEFAULT_PATHS_LIMIT); @JsonProperty("with_vertex") public boolean withVertex = false; + @JsonProperty("with_edge") + public boolean withEdge = false; @Override public String toString() { return String.format("TemplatePathsRequest{sources=%s,targets=%s," + "steps=%s,withRing=%s,capacity=%s,limit=%s," + - "withVertex=%s}", + "withVertex=%s,withEdge=%s}", this.sources, this.targets, this.steps, this.withRing, this.capacity, this.limit, - this.withVertex); + this.withVertex, this.withEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/VerticesAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/VerticesAPI.java index 56c4889f81..86364a23bf 100644 --- 
a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/VerticesAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/VerticesAPI.java @@ -22,20 +22,6 @@ import java.util.Iterator; import java.util.List; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.api.filter.CompressInterceptor.Compress; @@ -43,11 +29,25 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.store.Shard; +import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Path("graphs/{graph}/traversers/vertices") @Singleton @Tag(name = "VerticesAPI") diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/WeightedShortestPathAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/WeightedShortestPathAPI.java index b675f618bc..1c25661f15 100644 --- 
a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/WeightedShortestPathAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/WeightedShortestPathAPI.java @@ -21,20 +21,8 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; import java.util.Iterator; - -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Singleton; -import jakarta.ws.rs.DefaultValue; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.QueryParam; -import jakarta.ws.rs.core.Context; - -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.core.GraphManager; -import org.slf4j.Logger; +import java.util.List; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; @@ -42,13 +30,27 @@ import org.apache.hugegraph.api.graph.VertexAPI; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.QueryResults; +import org.apache.hugegraph.core.GraphManager; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser; -import org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser.NodeWithWeight; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Singleton; +import jakarta.ws.rs.DefaultValue; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + 
@Path("graphs/{graph}/traversers/weightedshortestpath") @Singleton @Tag(name = "WeightedShortestPathAPI") @@ -70,16 +72,20 @@ public String get(@Context GraphManager manager, @DefaultValue(DEFAULT_MAX_DEGREE) long maxDegree, @QueryParam("skip_degree") @DefaultValue("0") long skipDegree, + @QueryParam("with_vertex") + @DefaultValue("false") boolean withVertex, + @QueryParam("with_edge") + @DefaultValue("false") boolean withEdge, @QueryParam("capacity") - @DefaultValue(DEFAULT_CAPACITY) long capacity, - @QueryParam("with_vertex") boolean withVertex) { + @DefaultValue(DEFAULT_CAPACITY) long capacity) { LOG.debug("Graph [{}] get weighted shortest path between '{}' and " + "'{}' with direction {}, edge label {}, weight property {}, " + "max degree '{}', skip degree '{}', capacity '{}', " + - "and with vertex '{}'", + "with_vertex '{}' and with_edge '{}'", graph, source, target, direction, edgeLabel, weight, - maxDegree, skipDegree, capacity, withVertex); + maxDegree, skipDegree, capacity, withVertex, withEdge); + ApiMeasurer measure = new ApiMeasurer(); Id sourceId = VertexAPI.checkAndParseVertexId(source); Id targetId = VertexAPI.checkAndParseVertexId(target); Directions dir = Directions.convert(EdgeAPI.parseDirection(direction)); @@ -89,14 +95,38 @@ public String get(@Context GraphManager manager, SingleSourceShortestPathTraverser traverser = new SingleSourceShortestPathTraverser(g); - NodeWithWeight path = traverser.weightedShortestPath( - sourceId, targetId, dir, edgeLabel, weight, - maxDegree, skipDegree, capacity); - Iterator iterator = QueryResults.emptyIterator(); - if (path != null && withVertex) { - assert !path.node().path().isEmpty(); - iterator = g.vertices(path.node().path().toArray()); + SingleSourceShortestPathTraverser.NodeWithWeight node = + traverser.weightedShortestPath(sourceId, targetId, + dir, edgeLabel, weight, + maxDegree, skipDegree, capacity); + measure.addIterCount(traverser.vertexIterCounter.get(), + traverser.edgeIterCounter.get()); + + if 
(node == null) { + return manager.serializer(g, measure.measures()) + .writeWeightedPath(null, + QueryResults.emptyIterator(), + QueryResults.emptyIterator()); + } + + Iterator iterVertex; + List vertexIds = node.node().path(); + if (withVertex && !vertexIds.isEmpty()) { + iterVertex = g.vertices(vertexIds.toArray()); + measure.addIterCount(vertexIds.size(), 0L); + } else { + iterVertex = vertexIds.iterator(); + } + + Iterator iterEdge; + Set edges = node.getEdges(); + if (withEdge && !edges.isEmpty()) { + iterEdge = edges.iterator(); + } else { + iterEdge = HugeTraverser.EdgeRecord.getEdgeIds(edges).iterator(); } - return manager.serializer(g).writeWeightedPath(path, iterator); + + return manager.serializer(g, measure.measures()) + .writeWeightedPath(node, iterVertex, iterEdge); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java index 95a53faa39..e66b593568 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java @@ -264,4 +264,4 @@ public static synchronized ServerOptions instance() { disallowEmpty(), true ); -} +} \ No newline at end of file diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/core/GraphManager.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/core/GraphManager.java index b203c10470..2c73b5ee93 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/core/GraphManager.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/core/GraphManager.java @@ -224,6 +224,10 @@ public Serializer serializer(Graph g) { return JsonSerializer.instance(); } + public Serializer serializer(Graph g, Map apiMeasure) { + return JsonSerializer.instance(apiMeasure); + } + public void 
rollbackAll() { for (Graph graph : this.graphs.values()) { if (graph.features().graph().supportsTransactions() && diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/JsonSerializer.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/JsonSerializer.java index 8103602234..035499c598 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/JsonSerializer.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/JsonSerializer.java @@ -24,11 +24,6 @@ import java.util.List; import java.util.Map; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.api.API; import org.apache.hugegraph.auth.SchemaDefine.AuthElement; @@ -47,25 +42,44 @@ import org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser.WeightedPaths; import org.apache.hugegraph.traversal.optimize.TraversalUtil; import org.apache.hugegraph.util.JsonUtil; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public class JsonSerializer implements Serializer { private static final int LBUF_SIZE = 1024; - - private static JsonSerializer INSTANCE = new JsonSerializer(); + private static final String MEASURE_KEY = "measure"; + private static final JsonSerializer INSTANCE = new JsonSerializer(); + private Map apiMeasure = null; private JsonSerializer() { } + private JsonSerializer(Map apiMeasure) 
{ + this.apiMeasure = apiMeasure; + } + public static JsonSerializer instance() { return INSTANCE; } + public static JsonSerializer instance(Map apiMeasure) { + return new JsonSerializer(apiMeasure); + } + @Override public String writeMap(Map map) { - return JsonUtil.toJson(map); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.putAll(map); + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); + } + return JsonUtil.toJson(builder.build()); } @Override @@ -73,6 +87,10 @@ public String writeList(String label, Collection list) { try (ByteArrayOutputStream out = new ByteArrayOutputStream(LBUF_SIZE)) { out.write(String.format("{\"%s\": ", label).getBytes(API.CHARSET)); out.write(JsonUtil.toJson(list).getBytes(API.CHARSET)); + if (this.apiMeasure != null) { + out.write(String.format(",\"%s\": ", MEASURE_KEY).getBytes(API.CHARSET)); + out.write(JsonUtil.toJson(this.apiMeasure).getBytes(API.CHARSET)); + } out.write("}".getBytes(API.CHARSET)); return out.toString(API.CHARSET); } catch (Exception e) { @@ -122,6 +140,11 @@ private String writeIterator(String label, Iterator iter, out.write(page.getBytes(API.CHARSET)); } + if (this.apiMeasure != null) { + out.write(String.format(",\"%s\":[", MEASURE_KEY).getBytes(API.CHARSET)); + out.write(JsonUtil.toJson(this.apiMeasure).getBytes(API.CHARSET)); + } + out.write("}".getBytes(API.CHARSET)); return out.toString(API.CHARSET); } catch (HugeException e) { @@ -144,7 +167,7 @@ public String writePropertyKey(PropertyKey propertyKey) { @Override public String writeTaskWithSchema( - SchemaElement.TaskWithSchema taskWithSchema) { + SchemaElement.TaskWithSchema taskWithSchema) { StringBuilder builder = new StringBuilder(); long id = taskWithSchema.task() == null ? 
0L : taskWithSchema.task().asLong(); @@ -162,10 +185,14 @@ public String writeTaskWithSchema( "TaskWithSchema, only support " + "[PropertyKey, IndexLabel]", schemaElement); } - return builder.append("{\"").append(type).append("\": ") - .append(schema) - .append(", \"task_id\": ").append(id).append("}") - .toString(); + builder.append("{\"").append(type).append("\": ") + .append(schema).append(", \"task_id\": ") + .append(id); + if (this.apiMeasure != null) { + builder.append(String.format(",\"%s\":[", MEASURE_KEY)); + builder.append(JsonUtil.toJson(this.apiMeasure)); + } + return builder.append("}").toString(); } @Override @@ -245,27 +272,36 @@ public String writeAuthElements(String label, @Override public String writePaths(String name, Collection paths, - boolean withCrossPoint, - Iterator vertices) { + boolean withCrossPoint, Iterator vertices, + Iterator edges) { List> pathList = new ArrayList<>(paths.size()); for (HugeTraverser.Path path : paths) { pathList.add(path.toMap(withCrossPoint)); } - Map results; - if (vertices == null) { - results = ImmutableMap.of(name, pathList); - } else { - results = ImmutableMap.of(name, pathList, "vertices", vertices); + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put(name, pathList); + + if (vertices != null) { + builder.put("vertices", vertices); + } + + if (edges != null) { + builder.put("edges", edges); + } + + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); } - return JsonUtil.toJson(results); + + return JsonUtil.toJson(builder.build()); } @Override public String writeCrosspoints(CrosspointsPaths paths, - Iterator iterator, + Iterator vertices, + Iterator edges, boolean withPath) { - Map results; List> pathList; if (withPath) { pathList = new ArrayList<>(); @@ -275,50 +311,81 @@ public String writeCrosspoints(CrosspointsPaths paths, } else { pathList = ImmutableList.of(); } - results = ImmutableMap.of("crosspoints", paths.crosspoints(), - "paths", pathList, - "vertices", 
iterator); - return JsonUtil.toJson(results); + ImmutableMap.Builder builder = ImmutableMap.builder() + .put("crosspoints", + paths.crosspoints()) + .put("paths", pathList) + .put("vertices", vertices) + .put("edges", edges); + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); + } + return JsonUtil.toJson(builder.build()); } @Override public String writeSimilars(SimilarsMap similars, - Iterator vertices) { - return JsonUtil.toJson(ImmutableMap.of("similars", similars.toMap(), - "vertices", vertices)); + Iterator vertices) { + ImmutableMap.Builder builder = ImmutableMap.builder() + .put("similars", + similars.toMap()) + .put("vertices", vertices); + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); + } + return JsonUtil.toJson(builder.build()); } @Override - public String writeWeightedPath(NodeWithWeight path, - Iterator vertices) { + public String writeWeightedPath(NodeWithWeight path, Iterator vertices, + Iterator edges) { Map pathMap = path == null ? ImmutableMap.of() : path.toMap(); - return JsonUtil.toJson(ImmutableMap.of("path", pathMap, - "vertices", vertices)); + ImmutableMap.Builder builder = ImmutableMap.builder() + .put("path", pathMap) + .put("vertices", vertices) + .put("edges", edges); + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); + } + return JsonUtil.toJson(builder.build()); } @Override - public String writeWeightedPaths(WeightedPaths paths, - Iterator vertices) { + public String writeWeightedPaths(WeightedPaths paths, Iterator vertices, + Iterator edges) { Map> pathMap = paths == null ? 
ImmutableMap.of() : paths.toMap(); - return JsonUtil.toJson(ImmutableMap.of("paths", pathMap, - "vertices", vertices)); + ImmutableMap.Builder builder = ImmutableMap.builder() + .put("paths", pathMap) + .put("vertices", vertices) + .put("edges", edges); + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); + } + return JsonUtil.toJson(builder.build()); } @Override public String writeNodesWithPath(String name, List nodes, long size, Collection paths, - Iterator vertices) { + Iterator vertices, Iterator edges) { List> pathList = new ArrayList<>(); for (HugeTraverser.Path path : paths) { pathList.add(path.toMap(false)); } - Map results; - results = ImmutableMap.of(name, nodes, "size", size, - "paths", pathList, "vertices", vertices); - return JsonUtil.toJson(results); + ImmutableMap.Builder builder = ImmutableMap.builder() + .put(name, nodes) + .put("size", size) + .put("paths", pathList) + .put("vertices", vertices) + .put("edges", edges); + if (this.apiMeasure != null) { + builder.put(MEASURE_KEY, this.apiMeasure); + } + + return JsonUtil.toJson(builder.build()); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/Serializer.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/Serializer.java index f3b0cdcace..96fa634202 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/Serializer.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/serializer/Serializer.java @@ -22,9 +22,6 @@ import java.util.List; import java.util.Map; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; - import org.apache.hugegraph.auth.SchemaDefine.AuthElement; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.EdgeLabel; @@ -37,6 +34,8 @@ import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import 
org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser.NodeWithWeight; import org.apache.hugegraph.traversal.algorithm.SingleSourceShortestPathTraverser.WeightedPaths; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; public interface Serializer { @@ -77,23 +76,26 @@ public interface Serializer { String writeAuthElements(String label, List users); String writePaths(String name, Collection paths, - boolean withCrossPoint, Iterator vertices); + boolean withCrossPoint, Iterator vertices, + Iterator edges); default String writePaths(String name, Collection paths, boolean withCrossPoint) { - return this.writePaths(name, paths, withCrossPoint, null); + return this.writePaths(name, paths, withCrossPoint, null, null); } - String writeCrosspoints(CrosspointsPaths paths, Iterator iterator, - boolean withPath); + String writeCrosspoints(CrosspointsPaths paths, Iterator vertices, + Iterator edges, boolean withPath); - String writeSimilars(SimilarsMap similars, Iterator vertices); + String writeSimilars(SimilarsMap similars, Iterator vertices); - String writeWeightedPath(NodeWithWeight path, Iterator vertices); + String writeWeightedPath(NodeWithWeight path, Iterator vertices, + Iterator edges); - String writeWeightedPaths(WeightedPaths paths, Iterator vertices); + String writeWeightedPaths(WeightedPaths paths, Iterator vertices, + Iterator edges); String writeNodesWithPath(String name, List nodes, long size, Collection paths, - Iterator vertices); + Iterator vertices, Iterator edges); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/TriangleCountAlgorithm.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/TriangleCountAlgorithm.java index 2f512f7169..0fba245966 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/TriangleCountAlgorithm.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/algorithm/comm/TriangleCountAlgorithm.java @@ -26,19 +26,29 @@ import org.apache.commons.lang.mutable.MutableLong; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.job.UserJob; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; +import org.apache.tinkerpop.gremlin.structure.Edge; + import com.google.common.collect.ImmutableMap; public class TriangleCountAlgorithm extends AbstractCommAlgorithm { public static final String ALGO_NAME = "triangle_count"; + protected static int workersWhenBoth(Map parameters) { + Directions direction = direction4Out(parameters); + int workers = workers(parameters); + E.checkArgument(direction == Directions.BOTH || workers <= 0, + "The workers must be not set when direction!=BOTH, " + + "but got workers=%s and direction=%s", + workers, direction); + return workers; + } + @Override public String name() { return ALGO_NAME; @@ -60,16 +70,6 @@ public Object call(UserJob job, Map parameters) { } } - protected static int workersWhenBoth(Map parameters) { - Directions direction = direction4Out(parameters); - int workers = workers(parameters); - E.checkArgument(direction == Directions.BOTH || workers <= 0, - "The workers must be not set when direction!=BOTH, " + - "but got workers=%s and direction=%s", - workers, direction); - return workers; - } - protected static class Traverser extends AlgoTraverser { protected static final String KEY_TRIANGLES = "triangles"; @@ -83,8 +83,12 @@ protected Traverser(UserJob job, String name, int workers) { super(job, name, workers); } + protected static Set newOrderedSet() { + return new TreeSet<>(); + } + public Object triangleCount(Directions direction, long degree) { - Map 
results = triangles( direction, degree); + Map results = triangles(direction, degree); results = InsertionOrderUtil.newMap(results); results.remove(KEY_TRIADS); return results; @@ -191,7 +195,7 @@ private Set adjacentVertices(Id source, long degree, MutableLong edgesCount) { Iterator adjVertices = this.adjacentVertices(source, Directions.BOTH, - null, degree); + (Id) null, degree); Set set = newOrderedSet(); while (adjVertices.hasNext()) { edgesCount.increment(); @@ -206,7 +210,7 @@ protected long intersect(long degree, Set adjVertices) { Id empty = IdGenerator.ZERO; Iterator vertices; for (Id v : adjVertices) { - vertices = this.adjacentVertices(v, dir, null, degree); + vertices = this.adjacentVertices(v, dir, (Id) null, degree); Id lastVertex = empty; while (vertices.hasNext()) { Id vertex = vertices.next(); @@ -231,9 +235,5 @@ protected long intersect(long degree, Set adjVertices) { protected long localTriads(int size) { return size * (size - 1L) / 2L; } - - protected static Set newOrderedSet() { - return new TreeSet<>(); - } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CollectionPathsTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CollectionPathsTraverser.java index 76db199498..51e919dff8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CollectionPathsTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CollectionPathsTraverser.java @@ -21,15 +21,17 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.algorithm.strategy.TraverseStrategy; +import 
org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableList; public class CollectionPathsTraverser extends HugeTraverser { @@ -38,10 +40,10 @@ public CollectionPathsTraverser(HugeGraph graph) { super(graph); } - public Collection paths(Iterator sources, - Iterator targets, - EdgeStep step, int depth, boolean nearest, - long capacity, long limit) { + public WrappedPathCollection paths(Iterator sources, + Iterator targets, + EdgeStep step, int depth, boolean nearest, + long capacity, long limit) { checkCapacity(capacity); checkLimit(limit); @@ -63,31 +65,33 @@ public Collection paths(Iterator sources, "but got: %s", MAX_VERTICES, sourceList.size()); checkPositive(depth, "max depth"); + boolean concurrent = depth >= this.concurrentDepth(); TraverseStrategy strategy = TraverseStrategy.create( - depth >= this.concurrentDepth(), - this.graph()); + concurrent, this.graph()); Traverser traverser; if (nearest) { traverser = new NearestTraverser(this, strategy, sourceList, targetList, step, - depth, capacity, limit); + depth, capacity, limit, concurrent); } else { traverser = new Traverser(this, strategy, sourceList, targetList, step, - depth, capacity, limit); + depth, capacity, limit, concurrent); } do { // Forward traverser.forward(); if (traverser.finished()) { - return traverser.paths(); + Collection paths = traverser.paths(); + return new WrappedPathCollection(paths, traverser.edgeResults.getEdges(paths)); } // Backward traverser.backward(); if (traverser.finished()) { - return traverser.paths(); + Collection paths = traverser.paths(); + return new WrappedPathCollection(paths, traverser.edgeResults.getEdges(paths)); } } while (true); } @@ -98,8 +102,9 @@ private static class Traverser extends PathTraverser { public Traverser(HugeTraverser traverser, 
TraverseStrategy strategy, Collection sources, Collection targets, - EdgeStep step, int depth, long capacity, long limit) { - super(traverser, strategy, sources, targets, capacity, limit); + EdgeStep step, int depth, long capacity, long limit, + boolean concurrent) { + super(traverser, strategy, sources, targets, capacity, limit, concurrent); this.step = step; this.totalSteps = depth; } @@ -180,15 +185,15 @@ protected void reInitCurrentStepIfNeeded(EdgeStep step, } } - private class NearestTraverser extends Traverser { + private static class NearestTraverser extends Traverser { public NearestTraverser(HugeTraverser traverser, TraverseStrategy strategy, Collection sources, Collection targets, EdgeStep step, int depth, long capacity, - long limit) { + long limit, boolean concurrent) { super(traverser, strategy, sources, targets, step, - depth, capacity, limit); + depth, capacity, limit, concurrent); } @Override @@ -274,4 +279,23 @@ protected int accessedNodes() { return this.sourcesAll.size() + this.targetsAll.size(); } } + + public static class WrappedPathCollection { + + private final Collection paths; + private final Set edges; + + public WrappedPathCollection(Collection paths, Set edges) { + this.paths = paths; + this.edges = edges; + } + + public Collection paths() { + return paths; + } + + public Set edges() { + return edges; + } + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizePathsTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizePathsTraverser.java index 3d3559b05a..28e30367d6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizePathsTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizePathsTraverser.java @@ -22,25 +22,55 @@ import java.util.List; import java.util.Map; -import jakarta.ws.rs.core.MultivaluedMap; - 
import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Vertex; - import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.steps.WeightedEdgeStep; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import jakarta.ws.rs.core.MultivaluedMap; + public class CustomizePathsTraverser extends HugeTraverser { + private final EdgeRecord edgeResults; + public CustomizePathsTraverser(HugeGraph graph) { super(graph); + this.edgeResults = new EdgeRecord(false); + } + + public static List topNPath(List paths, + boolean incr, long limit) { + paths.sort((p1, p2) -> { + WeightPath wp1 = (WeightPath) p1; + WeightPath wp2 = (WeightPath) p2; + int result = Double.compare(wp1.totalWeight(), wp2.totalWeight()); + return incr ? 
result : -result; + }); + + if (limit == NO_LIMIT || paths.size() <= limit) { + return paths; + } + return paths.subList(0, (int) limit); + } + + private static List sample(List nodes, long sample) { + if (nodes.size() <= sample) { + return nodes; + } + List result = newList((int) sample); + int size = nodes.size(); + for (int random : CollectionUtil.randomSet(0, size, (int) sample)) { + result.add(nodes.get(random)); + } + return result; } public List customizedPaths(Iterator vertices, @@ -64,7 +94,8 @@ public List customizedPaths(Iterator vertices, int pathCount = 0; long access = 0; MultivaluedMap newVertices = null; - root : for (WeightedEdgeStep step : steps) { + root: + for (WeightedEdgeStep step : steps) { stepNum--; newVertices = newMultivalueMap(); Iterator edges; @@ -75,7 +106,11 @@ public List customizedPaths(Iterator vertices, edges = this.edgesOfVertex(entry.getKey(), step.step()); while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); + this.edgeIterCounter.addAndGet(1L); Id target = edge.id().otherVertexId(); + + this.edgeResults.addEdge(entry.getKey(), target, edge); + for (Node n : entry.getValue()) { // If have loop, skip target if (n.contains(target)) { @@ -113,6 +148,7 @@ public List customizedPaths(Iterator vertices, } } } + this.vertexIterCounter.addAndGet(sources.size()); // Re-init sources sources = newVertices; } @@ -120,6 +156,9 @@ public List customizedPaths(Iterator vertices, return ImmutableList.of(); } List paths = newList(); + if (newVertices == null) { + return ImmutableList.of(); + } for (List nodes : newVertices.values()) { for (Node n : nodes) { if (sorted) { @@ -133,36 +172,13 @@ public List customizedPaths(Iterator vertices, return paths; } - public static List topNPath(List paths, - boolean incr, long limit) { - paths.sort((p1, p2) -> { - WeightPath wp1 = (WeightPath) p1; - WeightPath wp2 = (WeightPath) p2; - int result = Double.compare(wp1.totalWeight(), wp2.totalWeight()); - return incr ? 
result : -result; - }); - - if (limit == NO_LIMIT || paths.size() <= limit) { - return paths; - } - return paths.subList(0, (int) limit); - } - - private static List sample(List nodes, long sample) { - if (nodes.size() <= sample) { - return nodes; - } - List result = newList((int) sample); - int size = nodes.size(); - for (int random : CollectionUtil.randomSet(0, size, (int) sample)) { - result.add(nodes.get(random)); - } - return result; + public EdgeRecord edgeResults() { + return edgeResults; } public static class WeightNode extends Node { - private double weight; + private final double weight; public WeightNode(Id id, Node parent, double weight) { super(id, parent); @@ -183,7 +199,7 @@ public List weights() { public static class WeightPath extends Path { - private List weights; + private final List weights; private double totalWeight; public WeightPath(List vertices, diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizedCrosspointsTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizedCrosspointsTraverser.java index f097711e04..7b6bf8f76f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizedCrosspointsTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CustomizedCrosspointsTraverser.java @@ -24,26 +24,82 @@ import java.util.Set; import java.util.stream.Collectors; -import jakarta.ws.rs.core.MultivaluedMap; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.CollectionUtil; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; 
import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.hugegraph.structure.HugeEdge; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.CollectionUtil; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import jakarta.ws.rs.core.MultivaluedMap; + public class CustomizedCrosspointsTraverser extends HugeTraverser { + private final EdgeRecord edgeResults; + public CustomizedCrosspointsTraverser(HugeGraph graph) { super(graph); + this.edgeResults = new EdgeRecord(false); + } + + private static CrosspointsPaths intersectionPaths(List sources, + List paths, + long limit) { + // Split paths by end vertices + MultivaluedMap endVertices = newMultivalueMap(); + for (Path path : paths) { + List vertices = path.vertices(); + int length = vertices.size(); + endVertices.add(vertices.get(0), vertices.get(length - 1)); + } + + Set sourceIds = sources.stream().map(HugeVertex::id) + .collect(Collectors.toSet()); + Set ids = endVertices.keySet(); + if (sourceIds.size() != ids.size() || !sourceIds.containsAll(ids)) { + return CrosspointsPaths.EMPTY; + } + + // Get intersection of end vertices + Collection intersection = null; + for (List ends : endVertices.values()) { + if (intersection == null) { + intersection = ends; + } else { + intersection = CollectionUtil.intersect(intersection, ends); + } + if (intersection == null || intersection.isEmpty()) { + return CrosspointsPaths.EMPTY; + } + } + assert intersection != null; + // Limit intersection number to limit crosspoints vertices in result + int size = intersection.size(); + if (limit != NO_LIMIT && size > limit) { + intersection = newList(intersection).subList(0, size - 1); + } + + // Filter intersection paths + List results = newList(); + for (Path path : paths) { + List vertices = path.vertices(); + int length = vertices.size(); + if (intersection.contains(vertices.get(length - 1))) { + 
results.add(path); + } + } + return new CrosspointsPaths(newSet(intersection), results); + } + + public EdgeRecord edgeResults() { + return edgeResults; } public CrosspointsPaths crosspointsPaths(Iterator vertices, @@ -64,6 +120,8 @@ public CrosspointsPaths crosspointsPaths(Iterator vertices, initialSources.add(vertex.id(), node); } List paths = newList(); + long edgeCount = 0L; + long vertexCount = 0L; for (PathPattern pathPattern : pathPatterns) { MultivaluedMap sources = initialSources; @@ -79,9 +137,14 @@ public CrosspointsPaths crosspointsPaths(Iterator vertices, for (Map.Entry> entry : sources.entrySet()) { List adjacency = newList(); edges = this.edgesOfVertex(entry.getKey(), step.edgeStep); + vertexCount += 1; while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); + edgeCount += 1; Id target = edge.id().otherVertexId(); + + this.edgeResults.addEdge(entry.getKey(), target, edge); + for (Node n : entry.getValue()) { // If have loop, skip target if (n.contains(target)) { @@ -104,67 +167,21 @@ public CrosspointsPaths crosspointsPaths(Iterator vertices, sources = newVertices; } assert stepNum == 0; + assert newVertices != null; for (List nodes : newVertices.values()) { for (Node n : nodes) { paths.add(new Path(n.path())); } } } + this.vertexIterCounter.addAndGet(vertexCount); + this.edgeIterCounter.addAndGet(edgeCount); return intersectionPaths(verticesList, paths, limit); } - private static CrosspointsPaths intersectionPaths(List sources, - List paths, - long limit) { - // Split paths by end vertices - MultivaluedMap endVertices = newMultivalueMap(); - for (Path path : paths) { - List vertices = path.vertices(); - int length = vertices.size(); - endVertices.add(vertices.get(0), vertices.get(length - 1)); - } - - Set sourceIds = sources.stream().map(HugeVertex::id) - .collect(Collectors.toSet()); - Set ids = endVertices.keySet(); - if (sourceIds.size() != ids.size() || !sourceIds.containsAll(ids)) { - return CrosspointsPaths.EMPTY; - } - - // Get 
intersection of end vertices - Collection intersection = null; - for (List ends : endVertices.values()) { - if (intersection == null) { - intersection = ends; - } else { - intersection = CollectionUtil.intersect(intersection, ends); - } - if (intersection == null || intersection.isEmpty()) { - return CrosspointsPaths.EMPTY; - } - } - assert intersection != null; - // Limit intersection number to limit crosspoints vertices in result - int size = intersection.size(); - if (limit != NO_LIMIT && size > limit) { - intersection = newList(intersection).subList(0, size - 1); - } - - // Filter intersection paths - List results = newList(); - for (Path path : paths) { - List vertices = path.vertices(); - int length = vertices.size(); - if (intersection.contains(vertices.get(length - 1))) { - results.add(path); - } - } - return new CrosspointsPaths(newSet(intersection), results); - } - public static class PathPattern { - private List steps; + private final List steps; public PathPattern() { this.steps = newList(); @@ -201,8 +218,8 @@ public static class CrosspointsPaths { ImmutableSet.of(), ImmutableList.of() ); - private Set crosspoints; - private List paths; + private final Set crosspoints; + private final List paths; public CrosspointsPaths(Set crosspoints, List paths) { this.crosspoints = crosspoints; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/FusiformSimilarityTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/FusiformSimilarityTraverser.java index b39ea009ff..b322167c28 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/FusiformSimilarityTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/FusiformSimilarityTraverser.java @@ -22,27 +22,27 @@ import java.util.Map; import java.util.Set; -import jakarta.ws.rs.core.MultivaluedHashMap; -import 
jakarta.ws.rs.core.MultivaluedMap; - import org.apache.commons.lang3.mutable.MutableInt; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.type.define.Frequency; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; -import org.apache.hugegraph.structure.HugeEdge; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.E; -import org.apache.hugegraph.util.InsertionOrderUtil; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import jakarta.ws.rs.core.MultivaluedHashMap; +import jakarta.ws.rs.core.MultivaluedMap; + public class FusiformSimilarityTraverser extends HugeTraverser { private long accessed = 0L; @@ -51,6 +51,20 @@ public FusiformSimilarityTraverser(HugeGraph graph) { super(graph); } + private static void checkGroupArgs(String groupProperty, int minGroups) { + if (groupProperty == null) { + E.checkArgument(minGroups == 0, + "Can't set min group count when " + + "group property not set"); + } else { + E.checkArgument(!groupProperty.isEmpty(), + "The group property can't be empty"); + E.checkArgument(minGroups > 0, + "Must set min group count when " + + "group property set"); + } + } + public SimilarsMap fusiformSimilarity(Iterator vertices, Directions direction, String label, int minNeighbors, double alpha, @@ -69,10 +83,10 @@ public SimilarsMap fusiformSimilarity(Iterator vertices, HugeVertex vertex = (HugeVertex) vertices.next(); // Find fusiform similarity for current vertex Set 
result = this.fusiformSimilarityForVertex( - vertex, direction, label, - minNeighbors, alpha, minSimilars, top, - groupProperty, minGroups, degree, capacity, - withIntermediary); + vertex, direction, label, + minNeighbors, alpha, minSimilars, top, + groupProperty, minGroups, degree, capacity, + withIntermediary); if (result.isEmpty()) { continue; } @@ -87,11 +101,11 @@ public SimilarsMap fusiformSimilarity(Iterator vertices, } private Set fusiformSimilarityForVertex( - HugeVertex vertex, Directions direction, - String label, int minNeighbors, double alpha, - int minSimilars, int top, String groupProperty, - int minGroups, long degree, long capacity, - boolean withIntermediary) { + HugeVertex vertex, Directions direction, + String label, int minNeighbors, double alpha, + int minSimilars, int top, String groupProperty, + int minGroups, long degree, long capacity, + boolean withIntermediary) { boolean matched = this.matchMinNeighborCount(vertex, direction, label, minNeighbors, degree); if (!matched) { @@ -105,6 +119,7 @@ private Set fusiformSimilarityForVertex( Map similars = newMap(); MultivaluedMap intermediaries = new MultivaluedHashMap<>(); Set neighbors = newIdSet(); + long vertexCount = 1L; while (edges.hasNext()) { Id target = ((HugeEdge) edges.next()).id().otherVertexId(); if (neighbors.contains(target)) { @@ -116,6 +131,7 @@ private Set fusiformSimilarityForVertex( Directions backDir = direction.opposite(); Iterator backEdges = this.edgesOfVertex(target, backDir, labelId, degree); + vertexCount += 1L; Set currentSimilars = newIdSet(); while (backEdges.hasNext()) { Id node = ((HugeEdge) backEdges.next()).id().otherVertexId(); @@ -137,6 +153,9 @@ private Set fusiformSimilarityForVertex( count.increment(); } } + this.edgeIterCounter.addAndGet(this.accessed); + this.vertexIterCounter.addAndGet(vertexCount); + // Delete source vertex assert similars.containsKey(vertex.id()); similars.remove(vertex.id()); @@ -189,20 +208,6 @@ private Set 
fusiformSimilarityForVertex( return result; } - private static void checkGroupArgs(String groupProperty, int minGroups) { - if (groupProperty == null) { - E.checkArgument(minGroups == 0, - "Can't set min group count when " + - "group property not set"); - } else { - E.checkArgument(!groupProperty.isEmpty(), - "The group property can't be empty"); - E.checkArgument(minGroups > 0, - "Must set min group count when " + - "group property set"); - } - } - private boolean matchMinNeighborCount(HugeVertex vertex, Directions direction, String label, @@ -249,7 +254,7 @@ public Similar(Id id, double score, List intermediaries) { this.id = id; this.score = score; assert newSet(intermediaries).size() == intermediaries.size() : - "Invalid intermediaries"; + "Invalid intermediaries"; this.intermediaries = intermediaries; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java index 21344e6b2a..c0d36f31bd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java @@ -19,15 +19,16 @@ import java.util.Collection; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; - -import jakarta.ws.rs.core.MultivaluedHashMap; -import jakarta.ws.rs.core.MultivaluedMap; +import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; import org.apache.commons.collections.CollectionUtils; import org.apache.hugegraph.HugeException; @@ -39,39 +40,38 @@ import org.apache.hugegraph.backend.query.QueryResults; import 
org.apache.hugegraph.backend.tx.GraphTransaction; import org.apache.hugegraph.config.CoreOptions; -import org.apache.hugegraph.schema.SchemaLabel; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.define.CollectionType; -import org.apache.hugegraph.type.define.Directions; -import org.apache.hugegraph.type.define.HugeKeys; -import org.apache.hugegraph.util.collection.CollectionFactory; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.slf4j.Logger; - import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.iterator.ExtendableIterator; import org.apache.hugegraph.iterator.FilterIterator; import org.apache.hugegraph.iterator.LimitIterator; import org.apache.hugegraph.iterator.MapperIterator; import org.apache.hugegraph.perf.PerfUtil.Watched; +import org.apache.hugegraph.schema.SchemaLabel; import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.optimize.TraversalUtil; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.type.define.HugeKeys; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.Log; +import org.apache.hugegraph.util.collection.CollectionFactory; +import org.apache.hugegraph.util.collection.ObjectIntMapping; +import org.apache.hugegraph.util.collection.ObjectIntMappingFactory; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -public class HugeTraverser { - - protected static final Logger LOG = 
Log.logger(HugeTraverser.class); - - private HugeGraph graph; +import jakarta.ws.rs.core.MultivaluedHashMap; +import jakarta.ws.rs.core.MultivaluedMap; - private static CollectionFactory collectionFactory; +public class HugeTraverser { public static final String DEFAULT_CAPACITY = "10000000"; public static final String DEFAULT_ELEMENTS_LIMIT = "10000000"; @@ -82,13 +82,16 @@ public class HugeTraverser { public static final String DEFAULT_SAMPLE = "100"; public static final String DEFAULT_WEIGHT = "0"; public static final int DEFAULT_MAX_DEPTH = 5000; - - protected static final int MAX_VERTICES = 10; - // Empirical value of scan limit, with which results can be returned in 3s public static final String DEFAULT_PAGE_LIMIT = "100000"; - public static final long NO_LIMIT = -1L; + protected static final Logger LOG = Log.logger(HugeTraverser.class); + protected static final int MAX_VERTICES = 10; + private static CollectionFactory collectionFactory; + private final HugeGraph graph; + // for apimeasure + public AtomicLong edgeIterCounter = new AtomicLong(0); + public AtomicLong vertexIterCounter = new AtomicLong(0); public HugeTraverser(HugeGraph graph) { this.graph = graph; @@ -97,6 +100,178 @@ public HugeTraverser(HugeGraph graph) { } } + public static void checkDegree(long degree) { + checkPositiveOrNoLimit(degree, "max degree"); + } + + public static void checkCapacity(long capacity) { + checkPositiveOrNoLimit(capacity, "capacity"); + } + + public static void checkLimit(long limit) { + checkPositiveOrNoLimit(limit, "limit"); + } + + public static void checkPositive(long value, String name) { + E.checkArgument(value > 0, + "The %s parameter must be > 0, but got %s", + name, value); + } + + public static void checkPositiveOrNoLimit(long value, String name) { + E.checkArgument(value > 0L || value == NO_LIMIT, + "The %s parameter must be > 0 or == %s, but got: %s", + name, NO_LIMIT, value); + } + + public static void checkNonNegative(long value, String name) { + 
E.checkArgument(value >= 0L, + "The %s parameter must be >= 0, but got: %s", + name, value); + } + + public static void checkNonNegativeOrNoLimit(long value, String name) { + E.checkArgument(value >= 0L || value == NO_LIMIT, + "The %s parameter must be >= 0 or == %s, but got: %s", + name, NO_LIMIT, value); + } + + public static void checkCapacity(long capacity, long access, + String traverse) { + if (capacity != NO_LIMIT && access > capacity) { + throw new HugeException("Exceed capacity '%s' while finding %s", + capacity, traverse); + } + } + + public static void checkSkipDegree(long skipDegree, long degree, + long capacity) { + E.checkArgument(skipDegree >= 0L && + skipDegree <= Query.DEFAULT_CAPACITY, + "The skipped degree must be in [0, %s], but got '%s'", + Query.DEFAULT_CAPACITY, skipDegree); + if (capacity != NO_LIMIT) { + E.checkArgument(degree != NO_LIMIT && degree < capacity, + "The max degree must be < capacity"); + E.checkArgument(skipDegree < capacity, + "The skipped degree must be < capacity"); + } + if (skipDegree > 0L) { + E.checkArgument(degree != NO_LIMIT && skipDegree >= degree, + "The skipped degree must be >= max degree, " + + "but got skipped degree '%s' and max degree '%s'", + skipDegree, degree); + } + } + + public static > Map topN( + Map map, + boolean sorted, + long limit) { + if (sorted) { + map = CollectionUtil.sortByValue(map, false); + } + if (limit == NO_LIMIT || map.size() <= limit) { + return map; + } + Map results = InsertionOrderUtil.newMap(); + long count = 0L; + for (Map.Entry entry : map.entrySet()) { + results.put(entry.getKey(), entry.getValue()); + if (++count >= limit) { + break; + } + } + return results; + } + + public static Iterator skipSuperNodeIfNeeded(Iterator edges, + long degree, + long skipDegree) { + if (skipDegree <= 0L) { + return edges; + } + List edgeList = newList(); + for (int i = 1; edges.hasNext(); i++) { + Edge edge = edges.next(); + if (i <= degree) { + edgeList.add(edge); + } + if (i >= skipDegree) { + 
return QueryResults.emptyIterator(); + } + } + return edgeList.iterator(); + } + + protected static Set newIdSet() { + return collectionFactory.newIdSet(); + } + + protected static Set newSet() { + return newSet(false); + } + + protected static Set newSet(boolean concurrent) { + if (concurrent) { + return ConcurrentHashMap.newKeySet(); + } else { + return collectionFactory.newSet(); + } + } + + protected static Set newSet(int initialCapacity) { + return collectionFactory.newSet(initialCapacity); + } + + protected static Set newSet(Collection collection) { + return collectionFactory.newSet(collection); + } + + protected static List newList() { + return collectionFactory.newList(); + } + + protected static List newList(int initialCapacity) { + return collectionFactory.newList(initialCapacity); + } + + protected static List newList(Collection collection) { + return collectionFactory.newList(collection); + } + + protected static Map newMap() { + return collectionFactory.newMap(); + } + + protected static Map newMap(int initialCapacity) { + return collectionFactory.newMap(initialCapacity); + } + + protected static MultivaluedMap newMultivalueMap() { + return new MultivaluedHashMap<>(); + } + + protected static List joinPath(Node prev, Node back, boolean ring) { + // Get self path + List path = prev.path(); + + // Get reversed other path + List backPath = back.path(); + Collections.reverse(backPath); + + if (!ring) { + // Avoid loop in path + if (CollectionUtils.containsAny(path, backPath)) { + return ImmutableList.of(); + } + } + + // Append other path behind self path + path.addAll(backPath); + return path; + } + public HugeGraph graph() { return this.graph; } @@ -157,6 +332,15 @@ protected Set adjacentVertices(Id source, EdgeStep step) { return neighbors; } + protected Iterator adjacentVertices(Id source, Directions dir, + List labels, long limit) { + Iterator edges = this.edgesOfVertex(source, dir, labels, limit); + return new MapperIterator<>(edges, e -> { + 
HugeEdge edge = (HugeEdge) e; + return edge.id().otherVertexId(); + }); + } + @Watched protected Iterator edgesOfVertex(Id source, Directions dir, Id label, long limit) { @@ -189,9 +373,26 @@ protected Iterator edgesOfVertex(Id source, Directions dir, } long[] count = new long[1]; - return new LimitIterator<>(results, e -> { - return count[0]++ >= limit; - }); + return new LimitIterator<>(results, e -> count[0]++ >= limit); + } + + protected Iterator edgesOfVertex(Id source, Directions dir, + List labels, long limit) { + if (labels == null || labels.isEmpty()) { + return this.edgesOfVertex(source, dir, (Id) null, limit); + } + ExtendableIterator results = new ExtendableIterator<>(); + for (Id label : labels) { + E.checkNotNull(label, "edge label"); + results.extend(this.edgesOfVertex(source, dir, label, limit)); + } + + if (limit == NO_LIMIT) { + return results; + } + + long[] count = new long[1]; + return new LimitIterator<>(results, e -> count[0]++ >= limit); } protected Iterator edgesOfVertex(Id source, EdgeStep edgeStep) { @@ -253,7 +454,7 @@ private void fillFilterBySortKeys(Query query, Id[] edgeLabels, if (!GraphTransaction.matchFullEdgeSortKeys(condQuery, this.graph())) { Id label = condQuery.condition(HugeKeys.LABEL); E.checkArgument(false, "The properties %s does not match " + - "sort keys of edge label '%s'", + "sort keys of edge label '%s'", this.graph().mapPkId2Name(properties.keySet()), this.graph().edgeLabel(label).name()); } @@ -308,182 +509,10 @@ protected void checkVertexExist(Id vertexId, String name) { this.graph.vertex(vertexId); } catch (NotFoundException e) { throw new IllegalArgumentException(String.format( - "The %s with id '%s' does not exist", name, vertexId), e); - } - } - - public static void checkDegree(long degree) { - checkPositiveOrNoLimit(degree, "max degree"); - } - - public static void checkCapacity(long capacity) { - checkPositiveOrNoLimit(capacity, "capacity"); - } - - public static void checkLimit(long limit) { - 
checkPositiveOrNoLimit(limit, "limit"); - } - - public static void checkPositive(long value, String name) { - E.checkArgument(value > 0, - "The %s parameter must be > 0, but got %s", - name, value); - } - - public static void checkPositiveOrNoLimit(long value, String name) { - E.checkArgument(value > 0L || value == NO_LIMIT, - "The %s parameter must be > 0 or == %s, but got: %s", - name, NO_LIMIT, value); - } - - public static void checkNonNegative(long value, String name) { - E.checkArgument(value >= 0L, - "The %s parameter must be >= 0, but got: %s", - name, value); - } - - public static void checkNonNegativeOrNoLimit(long value, String name) { - E.checkArgument(value >= 0L || value == NO_LIMIT, - "The %s parameter must be >= 0 or == %s, but got: %s", - name, NO_LIMIT, value); - } - - public static void checkCapacity(long capacity, long access, - String traverse) { - if (capacity != NO_LIMIT && access > capacity) { - throw new HugeException("Exceed capacity '%s' while finding %s", - capacity, traverse); - } - } - - public static void checkSkipDegree(long skipDegree, long degree, - long capacity) { - E.checkArgument(skipDegree >= 0L && - skipDegree <= Query.DEFAULT_CAPACITY, - "The skipped degree must be in [0, %s], but got '%s'", - Query.DEFAULT_CAPACITY, skipDegree); - if (capacity != NO_LIMIT) { - E.checkArgument(degree != NO_LIMIT && degree < capacity, - "The max degree must be < capacity"); - E.checkArgument(skipDegree < capacity, - "The skipped degree must be < capacity"); - } - if (skipDegree > 0L) { - E.checkArgument(degree != NO_LIMIT && skipDegree >= degree, - "The skipped degree must be >= max degree, " + - "but got skipped degree '%s' and max degree '%s'", - skipDegree, degree); - } - } - - public static > Map topN( - Map map, - boolean sorted, - long limit) { - if (sorted) { - map = CollectionUtil.sortByValue(map, false); - } - if (limit == NO_LIMIT || map.size() <= limit) { - return map; - } - Map results = InsertionOrderUtil.newMap(); - long count = 
0L; - for (Map.Entry entry : map.entrySet()) { - results.put(entry.getKey(), entry.getValue()); - if (++count >= limit) { - break; - } - } - return results; - } - - public static Iterator skipSuperNodeIfNeeded(Iterator edges, - long degree, - long skipDegree) { - if (skipDegree <= 0L) { - return edges; - } - List edgeList = newList(); - for (int i = 1; edges.hasNext(); i++) { - Edge edge = edges.next(); - if (i <= degree) { - edgeList.add(edge); - } - if (i >= skipDegree) { - return QueryResults.emptyIterator(); - } - } - return edgeList.iterator(); - } - - protected static Set newIdSet() { - return collectionFactory.newIdSet(); - } - - protected static Set newSet() { - return newSet(false); - } - - protected static Set newSet(boolean concurrent) { - if (concurrent) { - return ConcurrentHashMap.newKeySet(); - } else { - return collectionFactory.newSet(); + "The %s with id '%s' does not exist", name, vertexId), e); } } - protected static Set newSet(int initialCapacity) { - return collectionFactory.newSet(initialCapacity); - } - - protected static Set newSet(Collection collection) { - return collectionFactory.newSet(collection); - } - - protected static List newList() { - return collectionFactory.newList(); - } - - protected static List newList(int initialCapacity) { - return collectionFactory.newList(initialCapacity); - } - - protected static List newList(Collection collection) { - return collectionFactory.newList(collection); - } - - protected static Map newMap() { - return collectionFactory.newMap(); - } - - protected static Map newMap(int initialCapacity) { - return collectionFactory.newMap(initialCapacity); - } - - protected static MultivaluedMap newMultivalueMap() { - return new MultivaluedHashMap<>(); - } - - protected static List joinPath(Node prev, Node back, boolean ring) { - // Get self path - List path = prev.path(); - - // Get reversed other path - List backPath = back.path(); - Collections.reverse(backPath); - - if (!ring) { - // Avoid loop in path - if 
(CollectionUtils.containsAny(path, backPath)) { - return ImmutableList.of(); - } - } - - // Append other path behind self path - path.addAll(backPath); - return path; - } - public static class Node { private final Id id; @@ -560,6 +589,7 @@ public static class Path { private final Id crosspoint; private final List vertices; + private Set edges = Collections.emptySet(); public Path(List vertices) { this(null, vertices); @@ -570,6 +600,19 @@ public Path(Id crosspoint, List vertices) { this.vertices = vertices; } + public Path(List vertices, Set edges) { + this(null, vertices); + this.edges = edges; + } + + public Set getEdges() { + return edges; + } + + public void setEdges(Set edges) { + this.edges = edges; + } + public Id crosspoint() { return this.crosspoint; } @@ -615,6 +658,7 @@ public int hashCode() { * Compares the specified object with this path for equality. * Returns true if and only if both have same vertices list * without regard of crosspoint. + * * @param other the object to be compared for equality with this path * @return true if the specified object is equal to this path */ @@ -638,6 +682,13 @@ public static class PathSet implements Set { private final Set paths; + private Set edges = Collections.emptySet(); + + public PathSet(Set paths, Set edges) { + this(paths); + this.edges = edges; + } + public PathSet() { this(newSet()); } @@ -646,6 +697,18 @@ private PathSet(Set paths) { this.paths = paths; } + public Set getPaths() { + return this.paths; + } + + public Set getEdges() { + return edges; + } + + public void setEdges(Set edges) { + this.edges = edges; + } + @Override public boolean add(Path path) { return this.paths.add(path); @@ -729,7 +792,7 @@ public String toString() { } public void append(Id current) { - for (Iterator iter = paths.iterator(); iter.hasNext();) { + for (Iterator iter = paths.iterator(); iter.hasNext(); ) { Path path = iter.next(); if (path.vertices().contains(current)) { iter.remove(); @@ -739,4 +802,80 @@ public void 
append(Id current) { } } } + + public static class EdgeRecord { + private final Map edgeMap; + private final ObjectIntMapping idMapping; + + public EdgeRecord(boolean concurrent) { + this.edgeMap = new HashMap<>(); + this.idMapping = ObjectIntMappingFactory.newObjectIntMapping(concurrent); + } + + private static Long makeVertexPairIndex(int source, int target) { + return ((long) source & 0xFFFFFFFFL) | + (((long) target << 32) & 0xFFFFFFFF00000000L); + } + + public static Set getEdgeIds(Set edges) { + return edges.stream().map(edge -> ((HugeEdge) edge).id()).collect(Collectors.toSet()); + } + + private int code(Id id) { + if (id.number()) { + long l = id.asLong(); + if (0 <= l && l <= Integer.MAX_VALUE) { + return (int) l; + } + } + int code = this.idMapping.object2Code(id); + assert code > 0; + return -code; + } + + public void addEdge(Id source, Id target, Edge edge) { + Long index = makeVertexPairIndex(this.code(source), this.code(target)); + this.edgeMap.put(index, edge); + } + + private Edge getEdge(Id source, Id target) { + Long index = makeVertexPairIndex(this.code(source), this.code(target)); + return this.edgeMap.get(index); + } + + public Set getEdges(HugeTraverser.Path path) { + if (path == null || path.vertices().isEmpty()) { + return new HashSet<>(); + } + Iterator vertexIter = path.vertices().iterator(); + return getEdges(vertexIter); + } + + public Set getEdges(Collection paths) { + Set edgeIds = new HashSet<>(); + for (HugeTraverser.Path path : paths) { + edgeIds.addAll(getEdges(path)); + } + return edgeIds; + } + + public Set getEdges(Iterator vertexIter) { + Set edges = new HashSet<>(); + Id first = vertexIter.next(); + Id second; + while (vertexIter.hasNext()) { + second = vertexIter.next(); + Edge edge = getEdge(first, second); + if (edge == null) { + edge = getEdge(second, first); + } + if (edge != null) { + edges.add(edge); + } + first = second; + } + return edges; + } + + } } diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/JaccardSimilarTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/JaccardSimilarTraverser.java index 240792abe2..a5539cd0f6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/JaccardSimilarTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/JaccardSimilarTraverser.java @@ -27,10 +27,10 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; - import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; + import com.google.common.collect.ImmutableMap; public class JaccardSimilarTraverser extends OltpTraverser { @@ -39,6 +39,12 @@ public JaccardSimilarTraverser(HugeGraph graph) { super(graph); } + private static void reachCapacity(long count, long capacity) { + if (capacity != NO_LIMIT && count > capacity) { + throw new HugeException("Reach capacity '%s'", capacity); + } + } + public double jaccardSimilarity(Id vertex, Id other, Directions dir, String label, long degree) { E.checkNotNull(vertex, "vertex id"); @@ -51,9 +57,14 @@ public double jaccardSimilarity(Id vertex, Id other, Directions dir, Id labelId = this.getEdgeLabelId(label); Set sourceNeighbors = IteratorUtils.set(this.adjacentVertices( - vertex, dir, labelId, degree)); + vertex, dir, labelId, degree)); Set targetNeighbors = IteratorUtils.set(this.adjacentVertices( - other, dir, labelId, degree)); + other, dir, labelId, degree)); + + this.vertexIterCounter.addAndGet(2L); + this.edgeIterCounter.addAndGet(sourceNeighbors.size()); + this.edgeIterCounter.addAndGet(targetNeighbors.size()); + return 
jaccardSimilarity(sourceNeighbors, targetNeighbors); } @@ -96,6 +107,10 @@ public Map jaccardSimilarsConcurrent(Id source, EdgeStep step, // Query neighbors Set layer1s = this.adjacentVertices(source, step); + + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(layer1s.size()); + reachCapacity(count.get() + layer1s.size(), capacity); count.addAndGet(layer1s.size()); if (layer1s.isEmpty()) { @@ -111,6 +126,10 @@ public Map jaccardSimilarsConcurrent(Id source, EdgeStep step, return; } Set layer2s = this.adjacentVertices(id, step); + + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(layer2s.size()); + if (layer2s.isEmpty()) { results.put(id, 0.0D); } @@ -130,6 +149,10 @@ public Map jaccardSimilarsConcurrent(Id source, EdgeStep step, return; } Set layer3s = this.adjacentVertices(id, step); + + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(layer3s.size()); + reachCapacity(count.get() + layer3s.size(), capacity); if (layer3s.isEmpty()) { results.put(id, 0.0D); @@ -152,6 +175,10 @@ public Map jaccardSimilarsSingle(Id source, EdgeStep step, // Query neighbors Set layer1s = this.adjacentVertices(source, step); + + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(layer1s.size()); + reachCapacity(count + layer1s.size(), capacity); count += layer1s.size(); if (layer1s.isEmpty()) { @@ -168,6 +195,10 @@ public Map jaccardSimilarsSingle(Id source, EdgeStep step, continue; } layer2s = this.adjacentVertices(neighbor, step); + + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(layer2s.size()); + if (layer2s.isEmpty()) { results.put(neighbor, 0.0D); continue; @@ -188,6 +219,10 @@ public Map jaccardSimilarsSingle(Id source, EdgeStep step, continue; } layer3s = this.adjacentVertices(neighbor, step); + + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(layer3s.size()); + reachCapacity(count + layer3s.size(), capacity); if (layer3s.isEmpty()) { 
results.put(neighbor, 0.0D); @@ -201,10 +236,4 @@ public Map jaccardSimilarsSingle(Id source, EdgeStep step, return results; } - - private static void reachCapacity(long count, long capacity) { - if (capacity != NO_LIMIT && count > capacity) { - throw new HugeException("Reach capacity '%s'", capacity); - } - } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java index c9381cd423..b3ae29ac8f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java @@ -23,13 +23,12 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.KneighborRecords; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.structure.Edge; - -import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; public class KneighborTraverser extends OltpTraverser { @@ -53,12 +52,15 @@ public Set kneighbor(Id sourceV, Directions dir, Set all = newSet(); latest.add(sourceV); + this.vertexIterCounter.addAndGet(1L); while (depth-- > 0) { long remaining = limit == NO_LIMIT ? 
NO_LIMIT : limit - all.size(); latest = this.adjacentVertices(sourceV, latest, dir, labelId, all, degree, remaining); all.addAll(latest); + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(latest.size()); if (reachLimit(limit, all.size())) { break; } @@ -84,9 +86,15 @@ public KneighborRecords customizedKneighbor(Id source, EdgeStep step, return; } Iterator edges = edgesOfVertex(v, step); + this.vertexIterCounter.addAndGet(1L); while (!this.reachLimit(limit, records.size()) && edges.hasNext()) { - Id target = ((HugeEdge) edges.next()).id().otherVertexId(); + HugeEdge edge = (HugeEdge) edges.next(); + Id target = edge.id().otherVertexId(); records.addPath(v, target); + + records.edgeResults().addEdge(v, target, edge); + + this.edgeIterCounter.addAndGet(1L); } }; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java index 2c268c94b4..9f40be8fbd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java @@ -24,13 +24,12 @@ import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.KoutRecords; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.structure.Edge; - -import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; public class KoutTraverser extends OltpTraverser { @@ -66,6 +65,7 @@ public Set kout(Id sourceV, Directions dir, String label, long remaining = 
capacity == NO_LIMIT ? NO_LIMIT : capacity - latest.size(); + this.vertexIterCounter.addAndGet(1L); while (depth-- > 0) { // Just get limit nodes in last layer if limit < remaining capacity if (depth == 0 && limit != NO_LIMIT && @@ -80,14 +80,16 @@ public Set kout(Id sourceV, Directions dir, String label, latest = this.adjacentVertices(sourceV, latest, dir, labelId, null, degree, remaining); } + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(latest.size()); if (capacity != NO_LIMIT) { // Update 'remaining' value to record remaining capacity remaining -= latest.size(); if (remaining <= 0 && depth > 0) { throw new HugeException( - "Reach capacity '%s' while remaining depth '%s'", - capacity, depth); + "Reach capacity '%s' while remaining depth '%s'", + capacity, depth); } } } @@ -114,11 +116,17 @@ public KoutRecords customizedKout(Id source, EdgeStep step, return; } Iterator edges = edgesOfVertex(v, step); + this.vertexIterCounter.addAndGet(1L); while (!this.reachLimit(limit, depth[0], records.size()) && edges.hasNext()) { - Id target = ((HugeEdge) edges.next()).id().otherVertexId(); + HugeEdge edge = (HugeEdge) edges.next(); + Id target = edge.id().otherVertexId(); records.addPath(v, target); this.checkCapacity(capacity, records.accessed(), depth[0]); + + records.edgeResults().addEdge(v, target, edge); + + this.edgeIterCounter.addAndGet(1L); } }; @@ -136,8 +144,8 @@ private void checkCapacity(long capacity, long accessed, long depth) { } if (accessed >= capacity && depth > 0) { throw new HugeException( - "Reach capacity '%s' while remaining depth '%s'", - capacity, depth); + "Reach capacity '%s' while remaining depth '%s'", + capacity, depth); } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/MultiNodeShortestPathTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/MultiNodeShortestPathTraverser.java index 493b97286d..aa498fa956 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/MultiNodeShortestPathTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/MultiNodeShortestPathTraverser.java @@ -19,28 +19,55 @@ import java.util.Iterator; import java.util.List; +import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CopyOnWriteArraySet; import java.util.function.Consumer; import org.apache.commons.lang3.tuple.Pair; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; -import org.apache.hugegraph.structure.HugeVertex; -import org.apache.hugegraph.util.E; - public class MultiNodeShortestPathTraverser extends OltpTraverser { public MultiNodeShortestPathTraverser(HugeGraph graph) { super(graph); } - public List multiNodeShortestPath(Iterator vertices, - EdgeStep step, int maxDepth, - long capacity) { + private static void cmn(List all, int m, int n, int current, + List result, Consumer> consumer) { + assert m <= all.size(); + assert current <= all.size(); + if (result == null) { + result = newList(n); + } + if (n == 0) { + // All n items are selected + consumer.accept(result); + return; + } + if (m < n || current >= all.size()) { + return; + } + + // Select current item, continue to select C(m-1, n-1) + int index = result.size(); + result.add(all.get(current)); + cmn(all, m - 1, n - 1, ++current, result, consumer); + // Not select current item, continue to select C(m-1, n) + result.remove(index); + cmn(all, m - 1, n, current, result, consumer); + } + + public WrappedListPath multiNodeShortestPath(Iterator vertices, + 
EdgeStep step, int maxDepth, + long capacity) { List vertexList = IteratorUtils.list(vertices); int vertexCount = vertexList.size(); E.checkState(vertexCount >= 2 && vertexCount <= MAX_VERTICES, @@ -56,70 +83,70 @@ public List multiNodeShortestPath(Iterator vertices, }); if (maxDepth >= this.concurrentDepth() && vertexCount > 10) { - return this.multiNodeShortestPathConcurrent(pairs, step, - maxDepth, capacity); + return this.multiNodeShortestPathConcurrent(pairs, step, maxDepth, capacity); } else { - return this.multiNodeShortestPathSingle(pairs, step, - maxDepth, capacity); + return this.multiNodeShortestPathSingle(pairs, step, maxDepth, capacity); } } - public List multiNodeShortestPathConcurrent(List> pairs, - EdgeStep step, - int maxDepth, - long capacity) { - List results = new CopyOnWriteArrayList<>(); + public WrappedListPath multiNodeShortestPathConcurrent(List> pairs, + EdgeStep step, int maxDepth, + long capacity) { + List paths = new CopyOnWriteArrayList<>(); + Set edges = new CopyOnWriteArraySet<>(); ShortestPathTraverser traverser = - new ShortestPathTraverser(this.graph()); + new ShortestPathTraverser(this.graph()); this.traversePairs(pairs.iterator(), pair -> { Path path = traverser.shortestPath(pair.getLeft(), pair.getRight(), step, maxDepth, capacity); if (!Path.EMPTY.equals(path)) { - results.add(path); + paths.add(path); } + edges.addAll(path.getEdges()); }); + this.vertexIterCounter.addAndGet(traverser.vertexIterCounter.get()); + this.edgeIterCounter.addAndGet(traverser.edgeIterCounter.get()); - return results; + return new WrappedListPath(paths, edges); } - public List multiNodeShortestPathSingle(List> pairs, - EdgeStep step, int maxDepth, - long capacity) { - List results = newList(); + public WrappedListPath multiNodeShortestPathSingle(List> pairs, + EdgeStep step, int maxDepth, + long capacity) { + List paths = newList(); + Set edges = newSet(); ShortestPathTraverser traverser = - new ShortestPathTraverser(this.graph()); + new 
ShortestPathTraverser(this.graph()); for (Pair pair : pairs) { Path path = traverser.shortestPath(pair.getLeft(), pair.getRight(), step, maxDepth, capacity); if (!Path.EMPTY.equals(path)) { - results.add(path); + paths.add(path); } + edges.addAll(path.getEdges()); } - return results; + this.vertexIterCounter.addAndGet(traverser.vertexIterCounter.get()); + this.edgeIterCounter.addAndGet(traverser.edgeIterCounter.get()); + + return new WrappedListPath(paths, edges); } - private static void cmn(List all, int m, int n, int current, - List result, Consumer> consumer) { - assert m <= all.size(); - assert current <= all.size(); - if (result == null) { - result = newList(n); - } - if (n == 0) { - // All n items are selected - consumer.accept(result); - return; + public static class WrappedListPath { + + private final List paths; + private final Set edges; + + public WrappedListPath(List paths, Set edges) { + this.paths = paths; + this.edges = edges; } - if (m < n || current >= all.size()) { - return; + + public List paths() { + return paths; } - // Select current item, continue to select C(m-1, n-1) - int index = result.size(); - result.add(all.get(current)); - cmn(all, m - 1, n - 1, ++current, result, consumer); - // Not select current item, continue to select C(m-1, n) - result.remove(index); - cmn(all, m - 1, n, current, result, consumer); + public Set edges() { + return edges; + } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathTraverser.java index 7ae281640d..ac98872c41 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathTraverser.java @@ -17,6 +17,8 @@ package org.apache.hugegraph.traversal.algorithm; +import static 
org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; + import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -25,21 +27,18 @@ import java.util.function.BiConsumer; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser.EdgeRecord; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.algorithm.strategy.TraverseStrategy; import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.hugegraph.structure.HugeEdge; - -import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; - public abstract class PathTraverser { protected final HugeTraverser traverser; - - protected int stepCount; protected final long capacity; protected final long limit; + protected int stepCount; protected int totalSteps; // TODO: delete or implement abstract method protected Map> sources; @@ -52,10 +51,11 @@ public abstract class PathTraverser { protected Set paths; protected TraverseStrategy traverseStrategy; + protected EdgeRecord edgeResults; public PathTraverser(HugeTraverser traverser, TraverseStrategy strategy, Collection sources, Collection targets, - long capacity, long limit) { + long capacity, long limit, boolean concurrent) { this.traverser = traverser; this.traverseStrategy = strategy; @@ -79,6 +79,8 @@ public PathTraverser(HugeTraverser traverser, TraverseStrategy strategy, this.targetsAll.putAll(this.targets); this.paths = this.newPathSet(); + + this.edgeResults = new EdgeRecord(concurrent); } public void forward() { @@ -145,9 +147,13 @@ private void traverseOne(Id v, EdgeStep step, boolean forward) { while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); Id target = edge.id().otherVertexId(); + this.traverser.edgeIterCounter.addAndGet(1L); + + this.edgeResults.addEdge(v, target, edge); this.processOne(v, target, forward); } + 
this.traverser.vertexIterCounter.addAndGet(1L); } private void processOne(Id source, Id target, boolean forward) { @@ -205,10 +211,7 @@ protected boolean finished() { protected boolean reachLimit() { HugeTraverser.checkCapacity(this.capacity, this.accessedNodes(), "template paths"); - if (this.limit == NO_LIMIT || this.pathCount() < this.limit) { - return false; - } - return true; + return this.limit != NO_LIMIT && this.pathCount() >= this.limit; } protected int accessedNodes() { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathsTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathsTraverser.java index bdc9712c53..d8b256082d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathsTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/PathsTraverser.java @@ -21,13 +21,12 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.traversal.algorithm.records.PathsRecords; -import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.traversal.algorithm.records.PathsRecords; +import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; public class PathsTraverser extends HugeTraverser { @@ -75,6 +74,8 @@ public PathSet paths(Id sourceV, Directions sourceDir, } traverser.backward(sourceV, targetDir); } + vertexIterCounter.addAndGet(traverser.vertexCounter); + edgeIterCounter.addAndGet(traverser.edgeCounter); return traverser.paths(); } @@ -88,6 +89,8 @@ private class Traverser { private final long limit; private final PathSet paths; + private long 
vertexCounter; + private long edgeCounter; public Traverser(Id sourceV, Id targetV, Id label, long degree, long capacity, long limit) { @@ -96,6 +99,8 @@ public Traverser(Id sourceV, Id targetV, Id label, this.degree = degree; this.capacity = capacity; this.limit = limit; + this.vertexCounter = 0L; + this.edgeCounter = 0L; this.paths = new PathSet(); } @@ -115,10 +120,11 @@ public void forward(Id targetV, Directions direction) { } edges = edgesOfVertex(vid, direction, this.label, this.degree); - + this.vertexCounter += 1L; while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); Id target = edge.id().otherVertexId(); + this.edgeCounter += 1L; PathSet results = this.record.findPath(target, null, true, false); @@ -148,10 +154,11 @@ public void backward(Id sourceV, Directions direction) { } edges = edgesOfVertex(vid, direction, this.label, this.degree); - + this.vertexCounter += 1L; while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); Id target = edge.id().otherVertexId(); + this.edgeCounter += 1L; PathSet results = this.record.findPath(target, null, true, false); @@ -175,5 +182,9 @@ private boolean reachLimit() { checkCapacity(this.capacity, this.record.accessed(), "paths"); return this.limit != NO_LIMIT && this.paths.size() >= this.limit; } + + public long accessed() { + return this.record.accessed(); + } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SameNeighborTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SameNeighborTraverser.java index a551eb7cc2..06c5bec275 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SameNeighborTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SameNeighborTraverser.java @@ -17,15 +17,17 @@ package org.apache.hugegraph.traversal.algorithm; +import java.util.ArrayList; +import java.util.HashSet; 
+import java.util.List; import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; - import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; public class SameNeighborTraverser extends HugeTraverser { @@ -46,11 +48,56 @@ public Set sameNeighbors(Id vertex, Id other, Directions direction, Id labelId = this.getEdgeLabelId(label); Set sourceNeighbors = IteratorUtils.set(this.adjacentVertices( - vertex, direction, labelId, degree)); + vertex, direction, labelId, degree)); Set targetNeighbors = IteratorUtils.set(this.adjacentVertices( - other, direction, labelId, degree)); + other, direction, labelId, degree)); Set sameNeighbors = (Set) CollectionUtil.intersect( - sourceNeighbors, targetNeighbors); + sourceNeighbors, targetNeighbors); + + this.vertexIterCounter.addAndGet(2L); + this.edgeIterCounter.addAndGet(sourceNeighbors.size()); + this.edgeIterCounter.addAndGet(targetNeighbors.size()); + + if (limit != NO_LIMIT) { + int end = Math.min(sameNeighbors.size(), limit); + sameNeighbors = CollectionUtil.subSet(sameNeighbors, 0, end); + } + return sameNeighbors; + } + + public Set sameNeighbors(List vertexIds, Directions direction, + List labels, long degree, int limit) { + E.checkNotNull(vertexIds, "vertex ids"); + E.checkArgument(vertexIds.size() >= 2, "vertex_list size can't " + + "be less than 2"); + for (Id id : vertexIds) { + this.checkVertexExist(id, "vertex"); + } + E.checkNotNull(direction, "direction"); + checkDegree(degree); + checkLimit(limit); + + List labelsId = new ArrayList<>(); + if (labels != null) { + for (String label : labels) { + labelsId.add(this.getEdgeLabelId(label)); + } + } + + Set sameNeighbors = new HashSet<>(); + for (int i = 0; i < vertexIds.size(); i++) { + Set vertexNeighbors = 
IteratorUtils.set(this.adjacentVertices( + vertexIds.get(i), direction, labelsId, degree)); + if (i == 0) { + sameNeighbors = vertexNeighbors; + } else { + sameNeighbors = (Set) CollectionUtil.intersect( + sameNeighbors, vertexNeighbors); + } + this.vertexIterCounter.addAndGet(1L); + this.edgeIterCounter.addAndGet(vertexNeighbors.size()); + } + if (limit != NO_LIMIT) { int end = Math.min(sameNeighbors.size(), limit); sameNeighbors = CollectionUtil.subSet(sameNeighbors, 0, end); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java index e9584191f0..45ccdc6345 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/ShortestPathTraverser.java @@ -20,18 +20,19 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.perf.PerfUtil.Watched; +import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.ShortestPathRecords; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; -import org.apache.hugegraph.perf.PerfUtil.Watched; -import org.apache.hugegraph.structure.HugeEdge; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableList; public class ShortestPathTraverser extends HugeTraverser { @@ -81,7 +82,15 @@ public Path shortestPath(Id sourceV, Id targetV, Directions dir, 
checkCapacity(traverser.capacity, traverser.accessed(), "shortest path"); } - return paths.isEmpty() ? Path.EMPTY : paths.iterator().next(); + + this.vertexIterCounter.addAndGet(traverser.vertexCount); + this.edgeIterCounter.addAndGet(traverser.pathResults.accessed()); + + Path path = paths.isEmpty() ? Path.EMPTY : paths.iterator().next(); + + Set edges = traverser.edgeResults.getEdges(path); + path.setEdges(edges); + return path; } public Path shortestPath(Id sourceV, Id targetV, EdgeStep step, @@ -126,31 +135,40 @@ public PathSet allShortestPaths(Id sourceV, Id targetV, Directions dir, checkCapacity(traverser.capacity, traverser.accessed(), "shortest path"); } + + this.vertexIterCounter.addAndGet(traverser.vertexCount); + this.edgeIterCounter.addAndGet(traverser.pathResults.accessed()); + + paths.setEdges(traverser.edgeResults.getEdges(paths)); return paths; } private class Traverser { - private final ShortestPathRecords record; + private final ShortestPathRecords pathResults; + private final EdgeRecord edgeResults; private final Directions direction; private final Map labels; private final long degree; private final long skipDegree; private final long capacity; + private long vertexCount; public Traverser(Id sourceV, Id targetV, Directions dir, Map labels, long degree, long skipDegree, long capacity) { - this.record = new ShortestPathRecords(sourceV, targetV); + this.pathResults = new ShortestPathRecords(sourceV, targetV); + this.edgeResults = new EdgeRecord(false); this.direction = dir; this.labels = labels; this.degree = degree; this.skipDegree = skipDegree; this.capacity = capacity; + this.vertexCount = 0L; } public PathSet traverse(boolean all) { - return this.record.sourcesLessThanTargets() ? + return this.pathResults.sourcesLessThanTargets() ? this.forward(all) : this.backward(all); } @@ -162,21 +180,26 @@ public PathSet forward(boolean all) { PathSet results = new PathSet(); long degree = this.skipDegree > 0L ? 
this.skipDegree : this.degree; - this.record.startOneLayer(true); - while (this.record.hasNextKey()) { - Id source = this.record.nextKey(); + this.pathResults.startOneLayer(true); + while (this.pathResults.hasNextKey()) { + Id source = this.pathResults.nextKey(); Iterator edges = edgesOfVertex(source, this.direction, this.labels, degree); edges = skipSuperNodeIfNeeded(edges, this.degree, this.skipDegree); + + this.vertexCount += 1L; + while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); Id target = edge.id().otherVertexId(); - PathSet paths = this.record.findPath(target, - t -> !this.superNode(t, this.direction), - all, false); + this.edgeResults.addEdge(source, target, edge); + + PathSet paths = this.pathResults.findPath(target, + t -> !this.superNode(t, this.direction), + all, false); if (paths.isEmpty()) { continue; @@ -186,9 +209,10 @@ public PathSet forward(boolean all) { return paths; } } + } - this.record.finishOneLayer(); + this.pathResults.finishOneLayer(); return results; } @@ -202,21 +226,26 @@ public PathSet backward(boolean all) { long degree = this.skipDegree > 0L ? 
this.skipDegree : this.degree; Directions opposite = this.direction.opposite(); - this.record.startOneLayer(false); - while (this.record.hasNextKey()) { - Id source = this.record.nextKey(); + this.pathResults.startOneLayer(false); + while (this.pathResults.hasNextKey()) { + Id source = this.pathResults.nextKey(); Iterator edges = edgesOfVertex(source, opposite, this.labels, degree); edges = skipSuperNodeIfNeeded(edges, this.degree, this.skipDegree); + + this.vertexCount += 1L; + while (edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); Id target = edge.id().otherVertexId(); - PathSet paths = this.record.findPath(target, - t -> !this.superNode(t, opposite), - all, false); + this.edgeResults.addEdge(source, target, edge); + + PathSet paths = this.pathResults.findPath(target, + t -> !this.superNode(t, opposite), + all, false); if (paths.isEmpty()) { continue; @@ -229,7 +258,7 @@ public PathSet backward(boolean all) { } // Re-init targets - this.record.finishOneLayer(); + this.pathResults.finishOneLayer(); return results; } @@ -244,7 +273,7 @@ private boolean superNode(Id vertex, Directions direction) { } private long accessed() { - return this.record.accessed(); + return this.pathResults.accessed(); } } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SingleSourceShortestPathTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SingleSourceShortestPathTraverser.java index 0929711402..12d90600ec 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SingleSourceShortestPathTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SingleSourceShortestPathTraverser.java @@ -17,6 +17,9 @@ package org.apache.hugegraph.traversal.algorithm; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; import java.util.Iterator; import 
java.util.LinkedHashMap; import java.util.List; @@ -26,14 +29,14 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.QueryResults; -import org.apache.hugegraph.type.define.Directions; -import org.apache.tinkerpop.gremlin.structure.Edge; - import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.CollectionUtil; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.NumericUtil; +import org.apache.tinkerpop.gremlin.structure.Edge; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -57,13 +60,21 @@ public WeightedPaths singleSourceShortestPaths(Id sourceV, Directions dir, Id labelId = this.getEdgeLabelId(label); Traverser traverser = new Traverser(sourceV, dir, labelId, weight, - degree, skipDegree, capacity, - limit); + degree, skipDegree, capacity, limit); while (true) { // Found, reach max depth or reach capacity, stop searching traverser.forward(); if (traverser.done()) { - return traverser.shortestPaths(); + this.vertexIterCounter.addAndGet(traverser.vertexCount); + this.edgeIterCounter.addAndGet(traverser.edgeCount); + WeightedPaths paths = traverser.shortestPaths(); + List> pathList = paths.pathList(); + Set edges = new HashSet<>(); + for (List path : pathList) { + edges.addAll(traverser.edgeRecord.getEdges(path.iterator())); + } + paths.setEdges(edges); + return paths; } checkCapacity(traverser.capacity, traverser.size, "shortest path"); } @@ -91,18 +102,107 @@ public NodeWithWeight weightedShortestPath(Id sourceV, Id targetV, traverser.forward(); Map results = traverser.shortestPaths(); if (results.containsKey(targetV) || traverser.done()) { - return results.get(targetV); + this.vertexIterCounter.addAndGet(traverser.vertexCount); + this.edgeIterCounter.addAndGet(traverser.edgeCount); + 
NodeWithWeight nodeWithWeight = results.get(targetV); + if (nodeWithWeight != null) { + Iterator vertexIter = nodeWithWeight.node.path().iterator(); + Set edges = traverser.edgeRecord.getEdges(vertexIter); + nodeWithWeight.setEdges(edges); + } + return nodeWithWeight; } checkCapacity(traverser.capacity, traverser.size, "shortest path"); } } + public static class NodeWithWeight implements Comparable { + + private final double weight; + private final Node node; + + private Set edges = Collections.emptySet(); + + public NodeWithWeight(double weight, Node node) { + this.weight = weight; + this.node = node; + } + + public NodeWithWeight(double weight, Id id, NodeWithWeight prio) { + this(weight, new Node(id, prio.node())); + } + + public Set getEdges() { + return edges; + } + + public void setEdges(Set edges) { + this.edges = edges; + } + + public double weight() { + return weight; + } + + public Node node() { + return this.node; + } + + public Map toMap() { + return ImmutableMap.of("weight", this.weight, + "vertices", this.node().path()); + } + + @Override + public int compareTo(NodeWithWeight other) { + return Double.compare(this.weight, other.weight); + } + } + + public static class WeightedPaths extends LinkedHashMap { + + private static final long serialVersionUID = -313873642177730993L; + private Set edges = Collections.emptySet(); + + public Set getEdges() { + return edges; + } + + public void setEdges(Set edges) { + this.edges = edges; + } + + public Set vertices() { + Set vertices = newIdSet(); + vertices.addAll(this.keySet()); + for (NodeWithWeight nw : this.values()) { + vertices.addAll(nw.node().path()); + } + return vertices; + } + + public List> pathList() { + List> pathList = new ArrayList<>(); + for (NodeWithWeight nw : this.values()) { + pathList.add(nw.node.path()); + } + return pathList; + } + + public Map> toMap() { + Map> results = newMap(); + for (Map.Entry entry : this.entrySet()) { + Id source = entry.getKey(); + NodeWithWeight nw = 
entry.getValue(); + Map result = nw.toMap(); + results.put(source, result); + } + return results; + } + } + private class Traverser { - private WeightedPaths findingNodes = new WeightedPaths(); - private WeightedPaths foundNodes = new WeightedPaths(); - private Set sources; - private Id source; private final Directions direction; private final Id label; private final String weight; @@ -110,15 +210,21 @@ private class Traverser { private final long skipDegree; private final long capacity; private final long limit; - private long size; + private final WeightedPaths findingNodes = new WeightedPaths(); + private final WeightedPaths foundNodes = new WeightedPaths(); + private final EdgeRecord edgeRecord; + private final Id source; + private final long size; + private Set sources; + private long vertexCount; + private long edgeCount; private boolean done = false; public Traverser(Id sourceV, Directions dir, Id label, String weight, - long degree, long skipDegree, long capacity, - long limit) { + long degree, long skipDegree, long capacity, long limit) { this.source = sourceV; this.sources = ImmutableSet.of(new NodeWithWeight( - 0D, new Node(sourceV, null))); + 0D, new Node(sourceV, null))); this.direction = dir; this.label = label; this.weight = weight; @@ -127,6 +233,9 @@ public Traverser(Id sourceV, Directions dir, Id label, String weight, this.capacity = capacity; this.limit = limit; this.size = 0L; + this.vertexCount = 0L; + this.edgeCount = 0L; + this.edgeRecord = new EdgeRecord(false); } /** @@ -143,12 +252,16 @@ public void forward() { HugeEdge edge = (HugeEdge) edges.next(); Id target = edge.id().otherVertexId(); + this.edgeCount += 1L; + if (this.foundNodes.containsKey(target) || this.source.equals(target)) { // Already find shortest path for target, skip continue; } + this.edgeRecord.addEdge(node.node().id(), target, edge); + double currentWeight = this.edgeWeight(edge); double weight = currentWeight + node.weight(); NodeWithWeight nw = new 
NodeWithWeight(weight, target, node); @@ -164,9 +277,10 @@ public void forward() { } } } + this.vertexCount += sources.size(); Map sorted = CollectionUtil.sortByValue( - this.findingNodes, true); + this.findingNodes, true); double minWeight = 0; Set newSources = InsertionOrderUtil.newSet(); for (Map.Entry entry : sorted.entrySet()) { @@ -209,7 +323,7 @@ private double edgeWeight(HugeEdge edge) { edgeWeight = 1.0; } else { edgeWeight = NumericUtil.convertToNumber( - edge.value(this.weight)).doubleValue(); + edge.value(this.weight)).doubleValue(); } return edgeWeight; } @@ -232,62 +346,4 @@ private Iterator skipSuperNodeIfNeeded(Iterator edges) { return edgeList.iterator(); } } - - public static class NodeWithWeight implements Comparable { - - private final double weight; - private final Node node; - - public NodeWithWeight(double weight, Node node) { - this.weight = weight; - this.node = node; - } - - public NodeWithWeight(double weight, Id id, NodeWithWeight prio) { - this(weight, new Node(id, prio.node())); - } - - public double weight() { - return weight; - } - - public Node node() { - return this.node; - } - - public Map toMap() { - return ImmutableMap.of("weight", this.weight, - "vertices", this.node().path()); - } - - @Override - public int compareTo(NodeWithWeight other) { - return Double.compare(this.weight, other.weight); - } - } - - public static class WeightedPaths extends LinkedHashMap { - - private static final long serialVersionUID = -313873642177730993L; - - public Set vertices() { - Set vertices = newIdSet(); - vertices.addAll(this.keySet()); - for (NodeWithWeight nw : this.values()) { - vertices.addAll(nw.node().path()); - } - return vertices; - } - - public Map> toMap() { - Map> results = newMap(); - for (Map.Entry entry : this.entrySet()) { - Id source = entry.getKey(); - NodeWithWeight nw = entry.getValue(); - Map result = nw.toMap(); - results.put(source, result); - } - return results; - } - } } diff --git 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SubGraphTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SubGraphTraverser.java index b1e2405c28..25e03996eb 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SubGraphTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/SubGraphTraverser.java @@ -22,16 +22,15 @@ import java.util.Map; import java.util.Set; -import jakarta.ws.rs.core.MultivaluedMap; - import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; -import org.apache.hugegraph.structure.HugeEdge; -import org.apache.hugegraph.util.E; +import jakarta.ws.rs.core.MultivaluedMap; public class SubGraphTraverser extends HugeTraverser { @@ -39,17 +38,35 @@ public SubGraphTraverser(HugeGraph graph) { super(graph); } - public PathSet rays(Id sourceV, Directions dir, String label, - int depth, long degree, long capacity, long limit) { - return this.subGraphPaths(sourceV, dir, label, depth, degree, - capacity, limit, false, false); + private static boolean hasMultiEdges(List edges, Id target) { + boolean hasOutEdge = false; + boolean hasInEdge = false; + for (Edge edge : edges) { + if (((HugeEdge) edge).id().otherVertexId().equals(target)) { + if (((HugeEdge) edge).direction() == Directions.OUT) { + hasOutEdge = true; + } else { + hasInEdge = true; + } + if (hasOutEdge && hasInEdge) { + return true; + } + } + } + return false; + } + + public PathSet rays(Id sourceV, Directions dir, String label, int depth, + long degree, long capacity, long limit) { + return this.subGraphPaths(sourceV, dir, label, 
depth, degree, capacity, + limit, false, false); } public PathSet rings(Id sourceV, Directions dir, String label, int depth, boolean sourceInRing, long degree, long capacity, long limit) { - return this.subGraphPaths(sourceV, dir, label, depth, degree, - capacity, limit, true, sourceInRing); + return this.subGraphPaths(sourceV, dir, label, depth, degree, capacity, + limit, true, sourceInRing); } private PathSet subGraphPaths(Id sourceV, Directions dir, String label, @@ -69,48 +86,79 @@ private PathSet subGraphPaths(Id sourceV, Directions dir, String label, capacity, limit, rings, sourceInRing); PathSet paths = new PathSet(); - while (true) { + do { paths.addAll(traverser.forward(dir)); - if (--depth <= 0 || traverser.reachLimit() || - traverser.finished()) { - break; - } - } + } while (--depth > 0 && !traverser.reachLimit() && + !traverser.finished()); + this.vertexIterCounter.addAndGet(traverser.accessedVertices.size()); + this.edgeIterCounter.addAndGet(traverser.edgeCount); + paths.setEdges(traverser.edgeRecord.getEdges(paths)); return paths; } - private static boolean hasMultiEdges(List edges, Id target) { - boolean hasOutEdge = false; - boolean hasInEdge = false; - for (Edge edge : edges) { - if (((HugeEdge) edge).id().otherVertexId().equals(target)) { - if (((HugeEdge) edge).direction() == Directions.OUT) { - hasOutEdge = true; - } else { - hasInEdge = true; - } - if (hasOutEdge && hasInEdge) { - return true; + private static class RingPath extends Path { + + public RingPath(Id crosspoint, List vertices) { + super(crosspoint, vertices); + } + + @Override + public int hashCode() { + int hashCode = 0; + for (Id id : this.vertices()) { + hashCode ^= id.hashCode(); + } + return hashCode; + } + + /** + * Compares the specified object with this path for equality. + * Returns true if other path is equal to or + * reversed of this path. 
+ * + * @param other the object to be compared + * @return true if the specified object is equal to or + * reversed of this path + */ + @Override + public boolean equals(Object other) { + if (!(other instanceof RingPath)) { + return false; + } + List vertices = this.vertices(); + List otherVertices = ((Path) other).vertices(); + + if (vertices.equals(otherVertices)) { + return true; + } + if (vertices.size() != otherVertices.size()) { + return false; + } + for (int i = 0, size = vertices.size(); i < size; i++) { + int j = size - i - 1; + if (!vertices.get(i).equals(otherVertices.get(j))) { + return false; } } + return true; } - return false; } private class Traverser { private final Id source; - private MultivaluedMap sources = newMultivalueMap(); - private Set accessedVertices = newIdSet(); - private final Id label; - private int depth; private final long degree; private final long capacity; private final long limit; private final boolean rings; private final boolean sourceInRing; + private final Set accessedVertices = newIdSet(); + private final EdgeRecord edgeRecord; + private MultivaluedMap sources = newMultivalueMap(); + private int depth; private long pathCount; + private long edgeCount; public Traverser(Id sourceV, Id label, int depth, long degree, long capacity, long limit, boolean rings, @@ -126,6 +174,8 @@ public Traverser(Id sourceV, Id label, int depth, long degree, this.rings = rings; this.sourceInRing = sourceInRing; this.pathCount = 0L; + this.edgeCount = 0L; + this.edgeRecord = new EdgeRecord(false); } /** @@ -140,7 +190,7 @@ public PathSet forward(Directions direction) { Id vid = entry.getKey(); // Record edgeList to determine if multiple edges exist List edgeList = IteratorUtils.list(edgesOfVertex( - vid, direction, this.label, this.degree)); + vid, direction, this.label, this.degree)); edges = edgeList.iterator(); if (!edges.hasNext()) { @@ -163,7 +213,11 @@ public PathSet forward(Directions direction) { while (edges.hasNext()) { neighborCount++; 
HugeEdge edge = (HugeEdge) edges.next(); + this.edgeCount += 1L; Id target = edge.id().otherVertexId(); + + this.edgeRecord.addEdge(vid, target, edge); + // Avoid deduplicate path if (currentNeighbors.contains(target)) { continue; @@ -241,62 +295,11 @@ public PathSet forward(Directions direction) { private boolean reachLimit() { checkCapacity(this.capacity, this.accessedVertices.size(), this.rings ? "rings" : "rays"); - if (this.limit == NO_LIMIT || this.pathCount < this.limit) { - return false; - } - return true; + return this.limit != NO_LIMIT && this.pathCount >= this.limit; } private boolean finished() { return this.sources.isEmpty(); } } - - private static class RingPath extends Path { - - public RingPath(Id crosspoint, List vertices) { - super(crosspoint, vertices); - } - - @Override - public int hashCode() { - int hashCode = 0; - for (Id id : this.vertices()) { - hashCode ^= id.hashCode(); - } - return hashCode; - } - - /** - * Compares the specified object with this path for equality. - * Returns true if other path is equal to or - * reversed of this path. 
- * @param other the object to be compared - * @return true if the specified object is equal to or - * reversed of this path - */ - @Override - public boolean equals(Object other) { - if (!(other instanceof RingPath)) { - return false; - } - List vertices = this.vertices(); - List otherVertices = ((Path) other).vertices(); - - if (vertices.equals(otherVertices)) { - return true; - } - if (vertices.size() != otherVertices.size()) { - return false; - } - assert vertices.size() == otherVertices.size(); - for (int i = 0, size = vertices.size(); i < size; i++) { - int j = size - i - 1; - if (!vertices.get(i).equals(otherVertices.get(j))) { - return false; - } - } - return true; - } - } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/TemplatePathsTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/TemplatePathsTraverser.java index 85ce74651d..949014e192 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/TemplatePathsTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/TemplatePathsTraverser.java @@ -25,13 +25,13 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; import org.apache.hugegraph.traversal.algorithm.steps.RepeatEdgeStep; import org.apache.hugegraph.traversal.algorithm.strategy.TraverseStrategy; -import org.apache.tinkerpop.gremlin.structure.Vertex; - -import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; public class TemplatePathsTraverser extends HugeTraverser { @@ -39,11 +39,11 @@ public TemplatePathsTraverser(HugeGraph graph) { super(graph); } - public Set 
templatePaths(Iterator sources, - Iterator targets, - List steps, - boolean withRing, - long capacity, long limit) { + public WrappedPathSet templatePaths(Iterator sources, + Iterator targets, + List steps, + boolean withRing, long capacity, + long limit) { checkCapacity(capacity); checkLimit(limit); @@ -68,23 +68,26 @@ public Set templatePaths(Iterator sources, for (RepeatEdgeStep step : steps) { totalSteps += step.maxTimes(); } + + boolean concurrent = totalSteps >= this.concurrentDepth(); TraverseStrategy strategy = TraverseStrategy.create( - totalSteps >= this.concurrentDepth(), - this.graph()); + concurrent, this.graph()); Traverser traverser = new Traverser(this, strategy, sourceList, targetList, steps, - withRing, capacity, limit); + withRing, capacity, limit, concurrent); do { // Forward traverser.forward(); if (traverser.finished()) { - return traverser.paths(); + Set paths = traverser.paths(); + return new WrappedPathSet(paths, traverser.edgeResults.getEdges(paths)); } // Backward traverser.backward(); if (traverser.finished()) { - return traverser.paths(); + Set paths = traverser.paths(); + return new WrappedPathSet(paths, traverser.edgeResults.getEdges(paths)); } } while (true); } @@ -98,14 +101,14 @@ private static class Traverser extends PathTraverser { protected int sourceIndex; protected int targetIndex; - protected boolean sourceFinishOneStep = false; - protected boolean targetFinishOneStep = false; + protected boolean sourceFinishOneStep; + protected boolean targetFinishOneStep; public Traverser(HugeTraverser traverser, TraverseStrategy strategy, Collection sources, Collection targets, List steps, boolean withRing, - long capacity, long limit) { - super(traverser, strategy, sources, targets, capacity, limit); + long capacity, long limit, boolean concurrent) { + super(traverser, strategy, sources, targets, capacity, limit, concurrent); this.steps = steps; this.withRing = withRing; @@ -135,7 +138,7 @@ public void beforeTraverse(boolean forward) { 
public void afterTraverse(EdgeStep step, boolean forward) { Map> all = forward ? this.sourcesAll : - this.targetsAll; + this.targetsAll; this.addNewVerticesToAll(all); this.reInitCurrentStepIfNeeded(step, forward); this.stepCount++; @@ -276,4 +279,23 @@ public boolean lastSuperStep() { this.targetIndex == this.sourceIndex + 1; } } + + public static class WrappedPathSet { + + private final Set paths; + private final Set edges; + + public WrappedPathSet(Set paths, Set edges) { + this.paths = paths; + this.edges = edges; + } + + public Set paths() { + return paths; + } + + public Set edges() { + return edges; + } + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/ShortestPathRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/ShortestPathRecords.java index ce3647de2e..fe05d00824 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/ShortestPathRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/ShortestPathRecords.java @@ -23,14 +23,14 @@ import java.util.function.Function; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser.Path; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser.PathSet; import org.apache.hugegraph.traversal.algorithm.records.record.Int2IntRecord; import org.apache.hugegraph.traversal.algorithm.records.record.Record; import org.apache.hugegraph.traversal.algorithm.records.record.RecordType; import org.apache.hugegraph.util.collection.CollectionFactory; import org.apache.hugegraph.util.collection.IntMap; import org.apache.hugegraph.util.collection.IntSet; -import org.apache.hugegraph.traversal.algorithm.HugeTraverser.Path; -import org.apache.hugegraph.traversal.algorithm.HugeTraverser.PathSet; public class ShortestPathRecords extends 
DoubleWayMultiPathsRecords { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java index 4e53ecc16b..d41adc92a8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java @@ -25,17 +25,18 @@ import org.apache.hugegraph.HugeException; import org.apache.hugegraph.backend.id.Id; -import org.apache.hugegraph.type.define.CollectionType; -import org.apache.hugegraph.util.collection.CollectionFactory; -import org.apache.hugegraph.util.collection.IntIterator; -import org.apache.hugegraph.util.collection.IntMap; -import org.apache.hugegraph.util.collection.IntSet; import org.apache.hugegraph.perf.PerfUtil.Watched; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser.EdgeRecord; import org.apache.hugegraph.traversal.algorithm.HugeTraverser.Path; import org.apache.hugegraph.traversal.algorithm.HugeTraverser.PathSet; import org.apache.hugegraph.traversal.algorithm.records.record.Int2IntRecord; import org.apache.hugegraph.traversal.algorithm.records.record.Record; import org.apache.hugegraph.traversal.algorithm.records.record.RecordType; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.util.collection.CollectionFactory; +import org.apache.hugegraph.util.collection.IntIterator; +import org.apache.hugegraph.util.collection.IntMap; +import org.apache.hugegraph.util.collection.IntSet; public abstract class SingleWayMultiPathsRecords extends AbstractRecords { @@ -44,7 +45,7 @@ public abstract class SingleWayMultiPathsRecords extends AbstractRecords { private final int sourceCode; private final boolean nearest; 
private final IntSet accessedVertices; - + private final EdgeRecord edgeResults; private IntIterator parentRecordKeys; public SingleWayMultiPathsRecords(RecordType type, boolean concurrent, @@ -58,6 +59,7 @@ public SingleWayMultiPathsRecords(RecordType type, boolean concurrent, firstRecord.addPath(this.sourceCode, 0); this.records = new Stack<>(); this.records.push(firstRecord); + this.edgeResults = new EdgeRecord(concurrent); this.accessedVertices = CollectionFactory.newIntSet(); } @@ -176,6 +178,10 @@ protected final Stack records() { return this.records; } + public EdgeRecord edgeResults() { + return edgeResults; + } + public abstract int size(); public abstract List ids(long limit); diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityApiTest.java index 1ce5f6d705..6fba538d54 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityApiTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/JaccardSimilarityApiTest.java @@ -19,14 +19,15 @@ import java.util.Map; -import jakarta.ws.rs.core.Response; +import org.apache.hugegraph.api.BaseApiTest; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.apache.hugegraph.api.BaseApiTest; import com.google.common.collect.ImmutableMap; +import jakarta.ws.rs.core.Response; + public class JaccardSimilarityApiTest extends BaseApiTest { static final String PATH = TRAVERSERS_API + "/jaccardsimilarity"; @@ -72,9 +73,10 @@ public void testPost() { "\"top\": 3}", markoId); Response r = client().post(PATH, reqBody); String content = assertResponseStatus(200, r); - Double rippleJaccardSimilarity = assertJsonContains(content, rippleId); - Double peterJaccardSimilarity = assertJsonContains(content, peterId); - Double jsonJaccardSimilarity 
= assertJsonContains(content, jsonId); + Map jaccardSimilarity = assertJsonContains(content, "jaccard_similarity"); + Double rippleJaccardSimilarity = assertMapContains(jaccardSimilarity, rippleId); + Double peterJaccardSimilarity = assertMapContains(jaccardSimilarity, peterId); + Double jsonJaccardSimilarity = assertMapContains(jaccardSimilarity, jsonId); Assert.assertEquals(0.3333, rippleJaccardSimilarity.doubleValue(), 0.0001); Assert.assertEquals(0.25, peterJaccardSimilarity.doubleValue(), 0.0001); From 46bf8a1840604908935505e2a27bf288f34f32d0 Mon Sep 17 00:00:00 2001 From: lzyxx <94185075+lzyxx77@users.noreply.github.com> Date: Tue, 22 Aug 2023 14:07:34 +0800 Subject: [PATCH 02/24] fix checkstyle: Update StandardStateMachineCallback.java (#2290) During the compilation of your code, an informational message was displayed indicating an issue with the file /home/lzy/hugegraph/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardStateMachineCallback.java at line 36. The specific problem was that the length of this line exceeded 100 characters, with a total of 101 characters. To address this issue, I have made modifications to this class. I have split the originally long line into multiple lines to ensure that each line's length adheres to the coding standards' specified limits. This action not only aligns with the requirements of the code style, but also improves the readability and maintainability of the code. 
--- .../hugegraph/masterelection/StandardStateMachineCallback.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardStateMachineCallback.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardStateMachineCallback.java index 88bec95b31..28e01d2913 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardStateMachineCallback.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/masterelection/StandardStateMachineCallback.java @@ -33,7 +33,8 @@ public class StandardStateMachineCallback implements StateMachineCallback { private boolean isMaster = false; - public StandardStateMachineCallback(TaskManager taskManager, GlobalMasterInfo globalMasterInfo) { + public StandardStateMachineCallback(TaskManager taskManager, + GlobalMasterInfo globalMasterInfo) { this.taskManager = taskManager; this.taskManager.enableRoleElected(true); this.globalMasterInfo = globalMasterInfo; From 7c58b738dba1900580a5e4a919c04e357ff94413 Mon Sep 17 00:00:00 2001 From: V_Galaxy Date: Fri, 25 Aug 2023 15:20:44 +0800 Subject: [PATCH 03/24] chore(dist): replace wget to curl to download swagger-ui (#2277) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Main Changes: 1. replace `wget` by `curl` when downloading `swagger-ui` 2. silence the output of `curl` and `tar` commands 3. reuse the existing `v4.15.5.tar.gz` before downloading 4. avoid downloading `swagger-ui` in non-Linux platforms to prevent build failures (there might be a cross-platform build approach available 🤔) 5. wrapping the script content within `` blocks ensures that the script retains its original format when generating the `dist.sh` script (also suppresses automatic indentation) 6. 
remove intermediate files at the end of the script **An alternative approach**, during the generation of the `dist.sh` script, only the `${final.name}` property from the build process is utilized. It might be possible to separately store a `dist.sh` script within hugegraph-dist, then use `sed` during the build process to replace the value of `${final.name}`, **thereby avoiding the need to embed script content within the pom file**. --------- Co-authored-by: imbajin --- hugegraph-server/hugegraph-dist/dist.sh | 46 +++++ hugegraph-server/hugegraph-dist/pom.xml | 232 +++++++++++++++++------- pom.xml | 26 +++ 3 files changed, 234 insertions(+), 70 deletions(-) create mode 100644 hugegraph-server/hugegraph-dist/dist.sh diff --git a/hugegraph-server/hugegraph-dist/dist.sh b/hugegraph-server/hugegraph-dist/dist.sh new file mode 100644 index 0000000000..64029d49a0 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/dist.sh @@ -0,0 +1,46 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with this +# work for additional information regarding copyright ownership. The ASF +# licenses this file to You under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +VERSION=4.15.5 + +curl --version >/dev/null 2>&1 || + { + echo 'ERROR: Please install `curl` first if you need `swagger-ui`' + exit + } + +# TODO: perhaps it's necessary verify the checksum before reusing the existing tar +if [[ ! 
-f v$VERSION.tar.gz ]]; then + curl -s -S -L -o v$VERSION.tar.gz \ + https://github.com/swagger-api/swagger-ui/archive/refs/tags/v$VERSION.tar.gz || + { + echo 'ERROR: Download `swagger-ui` failed, please check your network connection' + exit + } +fi + +tar zxf v$VERSION.tar.gz -C . >/dev/null 2>&1 + +echo "window.onload = function() { window.ui = SwaggerUIBundle({ +url:'/openapi.json',dom_id:'#swagger-ui',deepLinking:true,layout:'StandaloneLayout', +presets:[SwaggerUIBundle.presets.apis, SwaggerUIStandalonePreset ], +plugins:[SwaggerUIBundle.plugins.DownloadUrl]});};" > \ + swagger-ui-$VERSION/dist/swagger-initializer.js + +# conceal the VERSION from the outside +mv swagger-ui-$VERSION swagger-ui +echo 'INFO: Successfully download `swagger-ui`' diff --git a/hugegraph-server/hugegraph-dist/pom.xml b/hugegraph-server/hugegraph-dist/pom.xml index fe2287f590..8a9738ddac 100644 --- a/hugegraph-server/hugegraph-dist/pom.xml +++ b/hugegraph-server/hugegraph-dist/pom.xml @@ -129,30 +129,6 @@ - - maven-assembly-plugin - 2.4 - - - assembly-hugegraph - package - - single - - - false - false - ${top.level.dir} - - - ${assembly.descriptor.dir}/assembly.xml - - ${final.name} - - - - - org.apache.maven.plugins maven-clean-plugin @@ -173,52 +149,168 @@ - - - maven-antrun-plugin - - - download-swagger-ui - package - - run - - - - - wget --version 1>/dev/null || exit - wget https://github.com/swagger-api/swagger-ui/archive/refs/tags/v4.15.5.tar.gz - tar zxf v4.15.5.tar.gz - echo "window.onload = function() { window.ui = SwaggerUIBundle({ - url:'/openapi.json',dom_id:'#swagger-ui',deepLinking:true,layout:'StandaloneLayout', - presets:[SwaggerUIBundle.presets.apis, SwaggerUIStandalonePreset ], - plugins:[SwaggerUIBundle.plugins.DownloadUrl]});};" > swagger-ui-4.15.5/dist/swagger-initializer.js - cp -r swagger-ui-4.15.5/dist ../${final.name}/swagger-ui - rm -rfv swagger-ui-4.15.5 dist.sh - - - - - - - - - package - - run - - - - - - - - - - - - - + + + + + org.apache.maven.plugins + 
maven-assembly-plugin + 2.4 + + + assembly-hugegraph + package + + single + + + false + false + ${top.level.dir} + + + ${assembly.descriptor.dir}/assembly.xml + + ${final.name} + + + + + + org.apache.maven.plugins + maven-antrun-plugin + 1.8 + + + download-swagger-ui + prepare-package + + run + + + + + + + + + + + install-swagger-ui + package + + run + + + + + + + + + + + + + + + + + + + + + + + assembly-hugegraph + + + + maven-assembly-plugin + + + + + + + !skip-assembly-hugegraph + + + + + unix-package + + + + maven-antrun-plugin + + + + + + unix + Linux + + + + + mac-package + + + + maven-antrun-plugin + + + + + + mac + + + + + tar-package + + + + org.apache.maven.plugins + maven-antrun-plugin + 1.8 + + + tar-package + package + + run + + + + + + + + + + + + + + + + + + + !skip-tar-package + + + + diff --git a/pom.xml b/pom.xml index 8a98fd6f15..aacd037edf 100644 --- a/pom.xml +++ b/pom.xml @@ -146,6 +146,32 @@ true + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-version + + enforce + + + false + + + + + + [1.8,12) + + + [3.5.0,) + + + + + + From 80491f5ccfe31b00f4979d568bb6be69a103ea65 Mon Sep 17 00:00:00 2001 From: Dandelion <49650772+aroundabout@users.noreply.github.com> Date: Mon, 28 Aug 2023 15:36:21 +0800 Subject: [PATCH 04/24] feat(dist): support pre-load test graph data in docker container (#2241) - Provide the related conf and groovy for user to pre load some data. - Change the start-hugegraph.sh to get the environment variables to decide to pre-load or not. 
--------- Co-authored-by: imbajin --- hugegraph-server/hugegraph-dist/README.md | 56 +++++++++++++++++++ .../assembly/static/bin/start-hugegraph.sh | 30 ++++++++-- 2 files changed, 80 insertions(+), 6 deletions(-) create mode 100644 hugegraph-server/hugegraph-dist/README.md diff --git a/hugegraph-server/hugegraph-dist/README.md b/hugegraph-server/hugegraph-dist/README.md new file mode 100644 index 0000000000..b8e6499285 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/README.md @@ -0,0 +1,56 @@ +# Deploy Hugegraph server with docker + +## 1. Deploy + +We can use docker to quickly start an inner HugeGraph server with RocksDB in background. + +1. Using docker run + + Use `docker run -itd --name=graph -p 18080:8080 hugegraph/hugegraph` to start hugegraph server. + +2. Using docker compose + + We can also use `docker-compose up -d`. The `docker-compose.yaml` is below: + + ```yaml + version: '3' + services: + graph: + image: hugegraph/hugegraph + ports: + - 18080:8080 + ``` + +## 2. Create Sample Graph on Server Startup + +If you want to **pre-load** some (test) data or graphs in container (by default), you can set the env `PRELOAD=true` + +If you want to customize the pre-loaded data, please mount the groovy scripts (not necessary). + + + +1. Using docker run + + Use `docker run -itd --name=graph -p 18080:8080 -e PRELOAD=true -v /path/to/yourScript:/hugegraph/scripts/example.groovy hugegraph/hugegraph` + to start hugegraph server. + +2. Using docker compose + + We can also use `docker-compose up -d` to quickly start. The `docker-compose.yaml` is below: + + ```yaml + version: '3' + services: + graph: + image: hugegraph/hugegraph + environment: + - PRELOAD=true + volumes: + - /path/to/yourscript:/hugegraph/scripts/example.groovy + ports: + - 18080:8080 + ``` + +3. Using start-hugegraph.sh + + If you deploy HugeGraph server without docker, you can also pass arguments using `-p`, like this: `bin/start-hugegraph.sh -p true`. 
\ No newline at end of file diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh index 85d259d3cd..c53df91b9b 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh @@ -18,33 +18,40 @@ OPEN_MONITOR="false" OPEN_SECURITY_CHECK="true" DAEMON="true" +PRELOAD="false" #VERBOSE="" GC_OPTION="" USER_OPTION="" SERVER_STARTUP_TIMEOUT_S=30 -while getopts "d:g:m:s:j:t:v" arg; do +while getopts "d:g:m:p:s:j:t:v" arg; do case ${arg} in d) DAEMON="$OPTARG" ;; g) GC_OPTION="$OPTARG" ;; m) OPEN_MONITOR="$OPTARG" ;; + p) PRELOAD="$OPTARG" ;; s) OPEN_SECURITY_CHECK="$OPTARG" ;; j) USER_OPTION="$OPTARG" ;; t) SERVER_STARTUP_TIMEOUT_S="$OPTARG" ;; # TODO: should remove it in future (check the usage carefully) v) VERBOSE="verbose" ;; - ?) echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-s true|false] [-j java_options] + ?) 
echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options] [-t timeout]" && exit 1 ;; esac done if [[ "$OPEN_MONITOR" != "true" && "$OPEN_MONITOR" != "false" ]]; then - echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-s true|false] [-j java_options]" + echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options]" exit 1 fi if [[ "$OPEN_SECURITY_CHECK" != "true" && "$OPEN_SECURITY_CHECK" != "false" ]]; then - echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-s true|false] [-j java_options]" + echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options]" + exit 1 +fi + +if [[ "$PRELOAD" != "true" && "$PRELOAD" != "false" ]]; then + echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options]" exit 1 fi @@ -62,6 +69,7 @@ BIN=$(abs_path) TOP="$(cd "$BIN"/../ && pwd)" CONF="$TOP/conf" LOGS="$TOP/logs" +SCRIPTS="$TOP/scripts" PID_FILE="$BIN/pid" . "$BIN"/util.sh @@ -79,13 +87,23 @@ if [ ! -d "$LOGS" ]; then mkdir -p "$LOGS" fi +GREMLIN_SERVER_CONF="gremlin-server.yaml" +if [[ $PRELOAD == "true" ]]; then + GREMLIN_SERVER_CONF="gremlin-server-preload.yaml" + EXAMPLE_SCRPIT="example-preload.groovy" + cp "${CONF}"/gremlin-server.yaml "${CONF}/${GREMLIN_SERVER_CONF}" + cp "${SCRIPTS}"/example.groovy "${SCRIPTS}/${EXAMPLE_SCRPIT}" + sed -i -e "s/empty-sample.groovy/$EXAMPLE_SCRPIT/g" "${CONF}/${GREMLIN_SERVER_CONF}" + sed -i -e '/registerRocksDB/d; /serverStarted/d' "${SCRIPTS}/${EXAMPLE_SCRPIT}" +fi + if [[ $DAEMON == "true" ]]; then echo "Starting HugeGraphServer in daemon mode..." 
- "${BIN}"/hugegraph-server.sh "${CONF}"/gremlin-server.yaml "${CONF}"/rest-server.properties \ + "${BIN}"/hugegraph-server.sh "${CONF}/${GREMLIN_SERVER_CONF}" "${CONF}"/rest-server.properties \ "${OPEN_SECURITY_CHECK}" "${USER_OPTION}" "${GC_OPTION}" >>"${LOGS}"/hugegraph-server.log 2>&1 & else echo "Starting HugeGraphServer in foreground mode..." - "${BIN}"/hugegraph-server.sh "${CONF}"/gremlin-server.yaml "${CONF}"/rest-server.properties \ + "${BIN}"/hugegraph-server.sh "${CONF}/${GREMLIN_SERVER_CONF}" "${CONF}"/rest-server.properties \ "${OPEN_SECURITY_CHECK}" "${USER_OPTION}" "${GC_OPTION}" >>"${LOGS}"/hugegraph-server.log 2>&1 fi From e5a75e16b23c61ae1dbbd55e327e664d868468b5 Mon Sep 17 00:00:00 2001 From: Chong Shen Date: Tue, 29 Aug 2023 16:09:31 +0800 Subject: [PATCH 05/24] fix(core): close flat mapper iterator after usage (#2281) close [Bug] FlatMapperIterator should be closed after usage #2280 --- .../traversal/algorithm/CountTraverser.java | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java index 7025aedde0..83b3747e0a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/CountTraverser.java @@ -32,6 +32,7 @@ import org.apache.hugegraph.iterator.FlatMapperIterator; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; public class CountTraverser extends HugeTraverser { @@ -78,19 +79,23 @@ public long count(Id source, List steps, }); } - // The last step, just query count - EdgeStep lastStep = steps.get(stepNum - 1); - while (edges.hasNext()) { - Id 
target = ((HugeEdge) edges.next()).id().otherVertexId(); - if (this.dedup(target)) { - continue; + try { + // The last step, just query count + EdgeStep lastStep = steps.get(stepNum - 1); + while (edges.hasNext()) { + Id target = ((HugeEdge) edges.next()).id().otherVertexId(); + if (this.dedup(target)) { + continue; + } + // Count last layer vertices(without dedup size) + long edgesCount = this.edgesCount(target, lastStep); + this.count.add(edgesCount); } - // Count last layer vertices(without dedup size) - long edgesCount = this.edgesCount(target, lastStep); - this.count.add(edgesCount); - } - return this.count.longValue(); + return this.count.longValue(); + } finally { + CloseableIterator.closeIterator(edges); + } } private Iterator edgesOfVertexWithCount(Id source, EdgeStep step) { From 7034ef9ab257143fd297059067735ba043693325 Mon Sep 17 00:00:00 2001 From: Dandelion <49650772+aroundabout@users.noreply.github.com> Date: Fri, 8 Sep 2023 22:07:57 +0800 Subject: [PATCH 06/24] fix(dist): avoid var PRELOAD cover environmnet vars (#2302) Update hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh --------- Co-authored-by: imbajin --- .../assembly/static/bin/start-hugegraph.sh | 54 ++++++++----------- .../src/assembly/static/bin/util.sh | 5 ++ 2 files changed, 28 insertions(+), 31 deletions(-) diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh index c53df91b9b..9ad7da8fad 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/start-hugegraph.sh @@ -18,11 +18,29 @@ OPEN_MONITOR="false" OPEN_SECURITY_CHECK="true" DAEMON="true" -PRELOAD="false" #VERBOSE="" GC_OPTION="" USER_OPTION="" SERVER_STARTUP_TIMEOUT_S=30 +# todo: move abs_path funtion to shell like util.sh +function abs_path() { + SOURCE="${BASH_SOURCE[0]}" + while [[ -h "$SOURCE" ]]; do + DIR="$(cd -P 
"$(dirname "$SOURCE")" && pwd)" + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + done + cd -P "$(dirname "$SOURCE")" && pwd +} + +BIN=$(abs_path) +TOP="$(cd "$BIN"/../ && pwd)" +CONF="$TOP/conf" +LOGS="$TOP/logs" +SCRIPTS="$TOP/scripts" +PID_FILE="$BIN/pid" + +. "$BIN"/util.sh while getopts "d:g:m:p:s:j:t:v" arg; do case ${arg} in @@ -35,45 +53,19 @@ while getopts "d:g:m:p:s:j:t:v" arg; do t) SERVER_STARTUP_TIMEOUT_S="$OPTARG" ;; # TODO: should remove it in future (check the usage carefully) v) VERBOSE="verbose" ;; - ?) echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options] - [-t timeout]" && exit 1 ;; + # Note: update usage info when the params changed + ?) exit_with_usage_help ;; esac done if [[ "$OPEN_MONITOR" != "true" && "$OPEN_MONITOR" != "false" ]]; then - echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options]" - exit 1 + exit_with_usage_help fi if [[ "$OPEN_SECURITY_CHECK" != "true" && "$OPEN_SECURITY_CHECK" != "false" ]]; then - echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options]" - exit 1 -fi - -if [[ "$PRELOAD" != "true" && "$PRELOAD" != "false" ]]; then - echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options]" - exit 1 + exit_with_usage_help fi -function abs_path() { - SOURCE="${BASH_SOURCE[0]}" - while [[ -h "$SOURCE" ]]; do - DIR="$(cd -P "$(dirname "$SOURCE")" && pwd)" - SOURCE="$(readlink "$SOURCE")" - [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" - done - cd -P "$(dirname "$SOURCE")" && pwd -} - -BIN=$(abs_path) -TOP="$(cd "$BIN"/../ && pwd)" -CONF="$TOP/conf" -LOGS="$TOP/logs" -SCRIPTS="$TOP/scripts" -PID_FILE="$BIN/pid" - -. 
"$BIN"/util.sh - GREMLIN_SERVER_URL=$(read_property "$CONF/rest-server.properties" "gremlinserver.url") if [ -z "$GREMLIN_SERVER_URL" ]; then GREMLIN_SERVER_URL="http://127.0.0.1:8182" diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/util.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/util.sh index d03083388a..64980403b1 100755 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/util.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/util.sh @@ -368,3 +368,8 @@ function kill_process_and_wait() { kill_process "$process_name" "$pid" wait_for_shutdown "$process_name" "$pid" "$timeout_s" } + +function exit_with_usage_help(){ + echo "USAGE: $0 [-d true|false] [-g g1] [-m true|false] [-p true|false] [-s true|false] [-j java_options] [-t timeout]" + exit 1 +} From 05c5b4b005f8411e497137e8ca6eb5beea8ee030 Mon Sep 17 00:00:00 2001 From: Wu Chencan <77946882+DanGuge@users.noreply.github.com> Date: Sun, 10 Sep 2023 16:47:45 +0800 Subject: [PATCH 07/24] feat(api-core): support label & property filtering for both edge and vertex & support kout dfs mode (#2295) - Support label & property filtering for both edge and vertex and the filtering is implemented in Kout Post and Kneighbor - Post Apis, reducing unnecessary graph searches through pruning - Support Kout dfs mode in Kout Post Api Originally only edge label filtering was supported, now label and property filtering for edge and vertex is supported. 
- add classes VEStepEntity and VEStep to support serialization in request - add class Steps to support filtering of edge and vertex in runtime(core) - add new method edgesOfVertex(Id source, Steps steps) to support label and property filtering for both edge and vertex in HugeTraverser.java --------- Co-authored-by: imbajin --- .../api/traversers/KneighborAPI.java | 20 +- .../hugegraph/api/traversers/KoutAPI.java | 48 +++-- .../api/traversers/TraverserAPI.java | 60 ++++++ .../apache/hugegraph/backend/query/Query.java | 1 + .../backend/tx/GraphTransaction.java | 55 ++++- .../traversal/algorithm/HugeTraverser.java | 128 +++++++++++- .../algorithm/KneighborTraverser.java | 6 +- .../traversal/algorithm/KoutTraverser.java | 40 +++- .../algorithm/iterator/NestedIterator.java | 195 ++++++++++++++++++ .../algorithm/records/KoutRecords.java | 43 +++- .../records/SingleWayMultiPathsRecords.java | 14 +- .../traversal/algorithm/steps/Steps.java | 187 +++++++++++++++++ .../traversal/optimize/TraversalUtil.java | 23 ++- .../api/traversers/KneighborApiTest.java | 22 +- .../hugegraph/api/traversers/KoutApiTest.java | 22 +- 15 files changed, 782 insertions(+), 82 deletions(-) create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/iterator/NestedIterator.java create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java index a0e7d0c4ee..8624b2dd02 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KneighborAPI.java @@ -37,7 +37,7 @@ import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import 
org.apache.hugegraph.traversal.algorithm.KneighborTraverser; import org.apache.hugegraph.traversal.algorithm.records.KneighborRecords; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; @@ -120,7 +120,7 @@ public String post(@Context GraphManager manager, E.checkArgumentNotNull(request, "The request body can't be null"); E.checkArgumentNotNull(request.source, "The source of request can't be null"); - E.checkArgument(request.step != null, + E.checkArgument(request.steps != null, "The steps of request can't be null"); if (request.countOnly) { E.checkArgument(!request.withVertex && !request.withPath && !request.withEdge, @@ -128,9 +128,9 @@ public String post(@Context GraphManager manager, } LOG.debug("Graph [{}] get customized kneighbor from source vertex " + - "'{}', with step '{}', limit '{}', count_only '{}', " + + "'{}', with steps '{}', limit '{}', count_only '{}', " + "with_vertex '{}', with_path '{}' and with_edge '{}'", - graph, request.source, request.step, request.limit, + graph, request.source, request.steps, request.limit, request.countOnly, request.withVertex, request.withPath, request.withEdge); @@ -139,11 +139,11 @@ public String post(@Context GraphManager manager, HugeGraph g = graph(manager, graph); Id sourceId = HugeVertex.getIdValue(request.source); - EdgeStep step = step(g, request.step); + Steps steps = steps(g, request.steps); KneighborRecords results; try (KneighborTraverser traverser = new KneighborTraverser(g)) { - results = traverser.customizedKneighbor(sourceId, step, + results = traverser.customizedKneighbor(sourceId, steps, request.maxDepth, request.limit); measure.addIterCount(traverser.vertexIterCounter.get(), @@ -202,8 +202,8 @@ private static class Request { @JsonProperty("source") public Object source; - @JsonProperty("step") - public 
TraverserAPI.Step step; + @JsonProperty("steps") + public TraverserAPI.VESteps steps; @JsonProperty("max_depth") public int maxDepth; @JsonProperty("limit") @@ -219,9 +219,9 @@ private static class Request { @Override public String toString() { - return String.format("PathRequest{source=%s,step=%s,maxDepth=%s" + + return String.format("PathRequest{source=%s,steps=%s,maxDepth=%s" + "limit=%s,countOnly=%s,withVertex=%s," + - "withPath=%s,withEdge=%s}", this.source, this.step, + "withPath=%s,withEdge=%s}", this.source, this.steps, this.maxDepth, this.limit, this.countOnly, this.withVertex, this.withPath, this.withEdge); } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java index 1adf2be5eb..1f1b6922d3 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/KoutAPI.java @@ -38,7 +38,7 @@ import org.apache.hugegraph.traversal.algorithm.HugeTraverser; import org.apache.hugegraph.traversal.algorithm.KoutTraverser; import org.apache.hugegraph.traversal.algorithm.records.KoutRecords; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.Log; @@ -126,18 +126,19 @@ public String post(@Context GraphManager manager, E.checkArgumentNotNull(request, "The request body can't be null"); E.checkArgumentNotNull(request.source, "The source of request can't be null"); - E.checkArgument(request.step != null, + E.checkArgument(request.steps != null, "The steps of request can't be null"); if (request.countOnly) { E.checkArgument(!request.withVertex && !request.withPath && !request.withEdge, "Can't return vertex, 
edge or path when count only"); } + HugeTraverser.checkTraverseMode(request.traverseMode); LOG.debug("Graph [{}] get customized kout from source vertex '{}', " + - "with step '{}', max_depth '{}', nearest '{}', " + + "with steps '{}', max_depth '{}', nearest '{}', " + "count_only '{}', capacity '{}', limit '{}', " + "with_vertex '{}', with_path '{}' and with_edge '{}'", - graph, request.source, request.step, request.maxDepth, + graph, request.source, request.steps, request.maxDepth, request.nearest, request.countOnly, request.capacity, request.limit, request.withVertex, request.withPath, request.withEdge); @@ -147,14 +148,22 @@ public String post(@Context GraphManager manager, HugeGraph g = graph(manager, graph); Id sourceId = HugeVertex.getIdValue(request.source); - EdgeStep step = step(g, request.step); + Steps steps = steps(g, request.steps); KoutRecords results; try (KoutTraverser traverser = new KoutTraverser(g)) { - results = traverser.customizedKout(sourceId, step, - request.maxDepth, - request.nearest, - request.capacity, - request.limit); + if (HugeTraverser.isTraverseModeDFS(request.traverseMode)) { + results = traverser.dfsKout(sourceId, steps, + request.maxDepth, + request.nearest, + request.capacity, + request.limit); + } else { + results = traverser.customizedKout(sourceId, steps, + request.maxDepth, + request.nearest, + request.capacity, + request.limit); + } measure.addIterCount(traverser.vertexIterCounter.get(), traverser.edgeIterCounter.get()); } @@ -172,7 +181,7 @@ public String post(@Context GraphManager manager, if (request.countOnly) { return manager.serializer(g, measure.measures()) - .writeNodesWithPath("kneighbor", neighbors, size, paths, + .writeNodesWithPath("kout", neighbors, size, paths, QueryResults.emptyIterator(), QueryResults.emptyIterator()); } @@ -210,8 +219,8 @@ private static class Request { @JsonProperty("source") public Object source; - @JsonProperty("step") - public TraverserAPI.Step step; + @JsonProperty("steps") + public 
TraverserAPI.VESteps steps; @JsonProperty("max_depth") public int maxDepth; @JsonProperty("nearest") @@ -228,16 +237,19 @@ private static class Request { public boolean withPath = false; @JsonProperty("with_edge") public boolean withEdge = false; + @JsonProperty("traverse_mode") + public String traverseMode = HugeTraverser.TRAVERSE_MODE_BFS; @Override public String toString() { - return String.format("KoutRequest{source=%s,step=%s,maxDepth=%s" + + return String.format("KoutRequest{source=%s,steps=%s,maxDepth=%s" + "nearest=%s,countOnly=%s,capacity=%s," + "limit=%s,withVertex=%s,withPath=%s," + - "withEdge=%s}", this.source, this.step, - this.maxDepth, this.nearest, this.countOnly, - this.capacity, this.limit, this.withVertex, - this.withPath, this.withEdge); + "withEdge=%s,traverseMode=%s}", this.source, + this.steps, this.maxDepth, this.nearest, + this.countOnly, this.capacity, this.limit, + this.withVertex, this.withPath, this.withEdge, + this.traverseMode); } } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TraverserAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TraverserAPI.java index 9649ec03c9..cc61a9fadb 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TraverserAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/traversers/TraverserAPI.java @@ -19,13 +19,16 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.DEFAULT_MAX_DEGREE; +import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.api.API; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; import org.apache.hugegraph.type.define.Directions; + import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonProperty; @@ -36,6 +39,25 
@@ protected static EdgeStep step(HugeGraph graph, Step step) { step.maxDegree, step.skipDegree); } + protected static Steps steps(HugeGraph graph, VESteps steps) { + Map> vSteps = new HashMap<>(); + if (steps.vSteps != null) { + for (VEStepEntity vStep : steps.vSteps) { + vSteps.put(vStep.label, vStep.properties); + } + } + + Map> eSteps = new HashMap<>(); + if (steps.eSteps != null) { + for (VEStepEntity eStep : steps.eSteps) { + eSteps.put(eStep.label, eStep.properties); + } + } + + return new Steps(graph, steps.direction, vSteps, eSteps, + steps.maxDegree, steps.skipDegree); + } + protected static class Step { @JsonProperty("direction") @@ -58,4 +80,42 @@ public String toString() { this.maxDegree, this.skipDegree); } } + + protected static class VEStepEntity { + + @JsonProperty("label") + public String label; + + @JsonProperty("properties") + public Map properties; + + @Override + public String toString() { + return String.format("VEStepEntity{label=%s,properties=%s}", + this.label, this.properties); + } + } + + protected static class VESteps { + + @JsonProperty("direction") + public Directions direction; + @JsonAlias("degree") + @JsonProperty("max_degree") + public long maxDegree = Long.parseLong(DEFAULT_MAX_DEGREE); + @JsonProperty("skip_degree") + public long skipDegree = 0L; + @JsonProperty("vertex_steps") + public List vSteps; + @JsonProperty("edge_steps") + public List eSteps; + + @Override + public String toString() { + return String.format("Steps{direction=%s,maxDegree=%s," + + "skipDegree=%s,vSteps=%s,eSteps=%s}", + this.direction, this.maxDegree, + this.skipDegree, this.vSteps, this.eSteps); + } + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java index 518ac5303a..32f416b371 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Query.java @@ -306,6 +306,7 @@ public boolean reachLimit(long count) { /** * Set or update the offset and limit by a range [start, end) * NOTE: it will use the min range one: max start and min end + * * @param start the range start, include it * @param end the range end, exclude it */ diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java index 7b5289237b..c9502c8097 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java @@ -690,7 +690,7 @@ public void removeVertex(HugeVertex vertex) { // Override vertices in local `addedVertices` this.addedVertices.remove(vertex.id()); // Force load vertex to ensure all properties are loaded (refer to #2181) - if (vertex.schemaLabel().indexLabels().size() > 0) { + if (vertex.schemaLabel().indexLabels().size() > 0) { vertex.forceLoad(); } // Collect the removed vertex @@ -971,7 +971,7 @@ protected Iterator queryEdgesByIds(Object[] edgeIds, * local vertex and duplicated id. 
*/ Iterator it = this.queryEdgesFromBackend(query); - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) Iterator r = (Iterator) it; return r; } @@ -1229,9 +1229,10 @@ public void removeEdgeProperty(HugeEdgeProperty prop) { /** * Construct one edge condition query based on source vertex, direction and * edge labels + * * @param sourceVertex source vertex of edge - * @param direction only be "IN", "OUT" or "BOTH" - * @param edgeLabels edge labels of queried edges + * @param direction only be "IN", "OUT" or "BOTH" + * @param edgeLabels edge labels of queried edges * @return constructed condition query */ @Watched @@ -1264,8 +1265,39 @@ public static ConditionQuery constructEdgesQuery(Id sourceVertex, } else if (edgeLabels.length > 1) { query.query(Condition.in(HugeKeys.LABEL, Arrays.asList(edgeLabels))); + } + + return query; + } + + public static ConditionQuery constructEdgesQuery(Id sourceVertex, + Directions direction, + List edgeLabels) { + E.checkState(sourceVertex != null, + "The edge query must contain source vertex"); + E.checkState(direction != null, + "The edge query must contain direction"); + + ConditionQuery query = new ConditionQuery(HugeType.EDGE); + + // Edge source vertex + query.eq(HugeKeys.OWNER_VERTEX, sourceVertex); + + // Edge direction + if (direction == Directions.BOTH) { + query.query(Condition.or( + Condition.eq(HugeKeys.DIRECTION, Directions.OUT), + Condition.eq(HugeKeys.DIRECTION, Directions.IN))); } else { - assert edgeLabels.length == 0; + assert direction == Directions.OUT || direction == Directions.IN; + query.eq(HugeKeys.DIRECTION, direction); + } + + // Edge labels + if (edgeLabels.size() == 1) { + query.eq(HugeKeys.LABEL, edgeLabels.get(0)); + } else if (edgeLabels.size() > 1) { + query.query(Condition.in(HugeKeys.LABEL, edgeLabels)); } return query; @@ -1397,8 +1429,8 @@ private QueryList optimizeQueries(Query query, } boolean supportIn = 
this.storeFeatures().supportsQueryWithInCondition(); - for (ConditionQuery cq: ConditionQueryFlatten.flatten( - (ConditionQuery) query, supportIn)) { + for (ConditionQuery cq : ConditionQueryFlatten.flatten( + (ConditionQuery) query, supportIn)) { // Optimize by sysprop Query q = this.optimizeQuery(cq); /* @@ -1421,7 +1453,7 @@ private Query optimizeQuery(ConditionQuery query) { "Not supported querying by id and conditions: %s", query); } - Id label = (Id) query.condition(HugeKeys.LABEL); + Id label = query.condition(HugeKeys.LABEL); // Optimize vertex query if (label != null && query.resultType().isVertex()) { @@ -1614,6 +1646,7 @@ private void checkNonnullProperty(HugeVertex vertex) { @SuppressWarnings("unchecked") Collection missed = CollectionUtils.subtract(nonNullKeys, keys); HugeGraph graph = this.graph(); + E.checkArgument(false, "All non-null property keys %s of " + "vertex label '%s' must be set, missed keys %s", graph.mapPkId2Name(nonNullKeys), vertexLabel.name(), @@ -1837,9 +1870,9 @@ private Iterator joinTxVertices(Query query, // Filter vertices matched conditions return q.test(v) ? 
v : null; }; - vertices = this.joinTxRecords(query, vertices, matchTxFunc, - this.addedVertices, this.removedVertices, - this.updatedVertices); + vertices = this.joinTxRecords(query, vertices, matchTxFunc, + this.addedVertices, this.removedVertices, + this.updatedVertices); return vertices; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java index c0d36f31bd..f5415d9c51 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java @@ -17,6 +17,7 @@ package org.apache.hugegraph.traversal.algorithm; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -48,7 +49,10 @@ import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.schema.SchemaLabel; import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.structure.HugeVertex; +import org.apache.hugegraph.traversal.algorithm.iterator.NestedIterator; import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; import org.apache.hugegraph.traversal.optimize.TraversalUtil; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.CollectionType; @@ -85,6 +89,9 @@ public class HugeTraverser { // Empirical value of scan limit, with which results can be returned in 3s public static final String DEFAULT_PAGE_LIMIT = "100000"; public static final long NO_LIMIT = -1L; + // traverse mode of kout algorithm: bfs and dfs + public static final String TRAVERSE_MODE_BFS = "breadth_first_search"; + public static final String TRAVERSE_MODE_DFS = "depth_first_search"; protected static final Logger LOG = 
Log.logger(HugeTraverser.class); protected static final int MAX_VERTICES = 10; private static CollectionFactory collectionFactory; @@ -164,6 +171,17 @@ public static void checkSkipDegree(long skipDegree, long degree, } } + public static void checkTraverseMode(String traverseMode) { + E.checkArgument(traverseMode.compareToIgnoreCase(TRAVERSE_MODE_BFS) == 0 || + traverseMode.compareToIgnoreCase(TRAVERSE_MODE_DFS) == 0, + "The traverse mode must be one of '%s' or '%s', but got '%s'", + TRAVERSE_MODE_BFS, TRAVERSE_MODE_DFS, traverseMode); + } + + public static boolean isTraverseModeDFS(String traverseMode) { + return traverseMode.compareToIgnoreCase(TRAVERSE_MODE_DFS) == 0; + } + public static > Map topN( Map map, boolean sorted, @@ -272,6 +290,15 @@ protected static List joinPath(Node prev, Node back, boolean ring) { return path; } + public static List pathEdges(Iterator iterator, HugeEdge edge) { + List edges = new ArrayList<>(); + if (iterator instanceof NestedIterator) { + edges = ((NestedIterator) iterator).pathEdges(); + } + edges.add(edge); + return edges; + } + public HugeGraph graph() { return this.graph; } @@ -438,6 +465,93 @@ private Iterator edgesOfVertex(Id source, EdgeStep edgeStep, return edgeStep.skipSuperNodeIfNeeded(edges); } + public Iterator edgesOfVertex(Id source, Steps steps) { + List edgeLabels = steps.edgeLabels(); + ConditionQuery cq = GraphTransaction.constructEdgesQuery( + source, steps.direction(), edgeLabels); + cq.capacity(Query.NO_CAPACITY); + if (steps.limit() != NO_LIMIT) { + cq.limit(steps.limit()); + } + + Map edgeConditions = + getFilterQueryConditions(steps.edgeSteps(), HugeType.EDGE); + + Iterator filteredEdges = + new FilterIterator<>(this.graph().edges(cq), + edge -> validateEdge(edgeConditions, (HugeEdge) edge)); + + return edgesOfVertexStep(filteredEdges, steps); + } + + protected Iterator edgesOfVertexStep(Iterator edges, Steps steps) { + if (steps.isVertexEmpty()) { + return edges; + } + + Map vertexConditions = + 
getFilterQueryConditions(steps.vertexSteps(), HugeType.VERTEX); + + return new FilterIterator<>(edges, + edge -> validateVertex(vertexConditions, (HugeEdge) edge)); + } + + private Boolean validateVertex(Map conditions, + HugeEdge edge) { + HugeVertex sourceV = edge.sourceVertex(); + HugeVertex targetV = edge.targetVertex(); + if (!conditions.containsKey(sourceV.schemaLabel().id()) || + !conditions.containsKey(targetV.schemaLabel().id())) { + return false; + } + + ConditionQuery cq = conditions.get(sourceV.schemaLabel().id()); + if (cq != null) { + sourceV = (HugeVertex) this.graph.vertex(sourceV.id()); + if (!cq.test(sourceV)) { + return false; + } + } + + cq = conditions.get(targetV.schemaLabel().id()); + if (cq != null) { + targetV = (HugeVertex) this.graph.vertex(targetV.id()); + return cq.test(targetV); + } + return true; + } + + private Boolean validateEdge(Map conditions, + HugeEdge edge) { + if (!conditions.containsKey(edge.schemaLabel().id())) { + return false; + } + + ConditionQuery cq = conditions.get(edge.schemaLabel().id()); + if (cq != null) { + return cq.test(edge); + } + return true; + } + + private Map getFilterQueryConditions( + Map idStepEntityMap, HugeType type) { + Map conditions = new HashMap<>(); + + for (Map.Entry entry : idStepEntityMap.entrySet()) { + Steps.StepEntity stepEntity = entry.getValue(); + if (stepEntity.properties() != null && !stepEntity.properties().isEmpty()) { + ConditionQuery cq = new ConditionQuery(type); + Map properties = stepEntity.properties(); + TraversalUtil.fillConditionQuery(cq, properties, this.graph); + conditions.put(entry.getKey(), cq); + } else { + conditions.put(entry.getKey(), null); + } + } + return conditions; + } + private void fillFilterBySortKeys(Query query, Id[] edgeLabels, Map properties) { if (properties == null || properties.isEmpty()) { @@ -513,6 +627,19 @@ protected void checkVertexExist(Id vertexId, String name) { } } + public Iterator createNestedIterator(Id sourceV, Steps steps, + int depth, 
Set visited, boolean nearest) { + E.checkArgument(depth > 0, "The depth should large than 0 for nested iterator"); + visited.add(sourceV); + + // build a chained iterator path with length of depth + Iterator iterator = this.edgesOfVertex(sourceV, steps); + for (int i = 1; i < depth; i++) { + iterator = new NestedIterator(this, iterator, steps, visited, nearest); + } + return iterator; + } + public static class Node { private final Id id; @@ -876,6 +1003,5 @@ public Set getEdges(Iterator vertexIter) { } return edges; } - } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java index b3ae29ac8f..9f16f480b2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java @@ -25,7 +25,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.KneighborRecords; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; @@ -69,7 +69,7 @@ public Set kneighbor(Id sourceV, Directions dir, return all; } - public KneighborRecords customizedKneighbor(Id source, EdgeStep step, + public KneighborRecords customizedKneighbor(Id source, Steps steps, int maxDepth, long limit) { E.checkNotNull(source, "source vertex id"); this.checkVertexExist(source, "source vertex"); @@ -85,7 +85,7 @@ public KneighborRecords customizedKneighbor(Id source, EdgeStep step, if (this.reachLimit(limit, records.size())) { return; } - Iterator edges = 
edgesOfVertex(v, step); + Iterator edges = edgesOfVertex(v, steps); this.vertexIterCounter.addAndGet(1L); while (!this.reachLimit(limit, records.size()) && edges.hasNext()) { HugeEdge edge = (HugeEdge) edges.next(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java index 9f40be8fbd..9924c766c5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java @@ -26,7 +26,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.KoutRecords; -import org.apache.hugegraph.traversal.algorithm.steps.EdgeStep; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.E; import org.apache.tinkerpop.gremlin.structure.Edge; @@ -97,7 +97,7 @@ public Set kout(Id sourceV, Directions dir, String label, return latest; } - public KoutRecords customizedKout(Id source, EdgeStep step, + public KoutRecords customizedKout(Id source, Steps steps, int maxDepth, boolean nearest, long capacity, long limit) { E.checkNotNull(source, "source vertex id"); @@ -109,13 +109,13 @@ public KoutRecords customizedKout(Id source, EdgeStep step, depth[0] = maxDepth; boolean concurrent = maxDepth >= this.concurrentDepth(); - KoutRecords records = new KoutRecords(concurrent, source, nearest); + KoutRecords records = new KoutRecords(concurrent, source, nearest, 0); Consumer consumer = v -> { if (this.reachLimit(limit, depth[0], records.size())) { return; } - Iterator edges = edgesOfVertex(v, step); + Iterator edges = edgesOfVertex(v, steps); this.vertexIterCounter.addAndGet(1L); while 
(!this.reachLimit(limit, depth[0], records.size()) && edges.hasNext()) { @@ -138,6 +138,38 @@ public KoutRecords customizedKout(Id source, EdgeStep step, return records; } + public KoutRecords dfsKout(Id source, Steps steps, + int maxDepth, boolean nearest, + long capacity, long limit) { + E.checkNotNull(source, "source vertex id"); + this.checkVertexExist(source, "source vertex"); + checkPositive(maxDepth, "k-out max_depth"); + checkCapacity(capacity); + checkLimit(limit); + + Set all = newIdSet(); + all.add(source); + + KoutRecords records = new KoutRecords(false, source, nearest, maxDepth); + Iterator iterator = this.createNestedIterator(source, steps, maxDepth, all, nearest); + while (iterator.hasNext()) { + HugeEdge edge = (HugeEdge) iterator.next(); + this.edgeIterCounter.addAndGet(1L); + + Id target = edge.id().otherVertexId(); + if (!nearest || !all.contains(target)) { + records.addFullPath(HugeTraverser.pathEdges(iterator, edge)); + } + + if (limit != NO_LIMIT && records.size() >= limit || + capacity != NO_LIMIT && all.size() > capacity) { + break; + } + } + + return records; + } + private void checkCapacity(long capacity, long accessed, long depth) { if (capacity == NO_LIMIT) { return; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/iterator/NestedIterator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/iterator/NestedIterator.java new file mode 100644 index 0000000000..3b9f037940 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/iterator/NestedIterator.java @@ -0,0 +1,195 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.traversal.algorithm.iterator; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.iterator.WrappedIterator; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; +import org.apache.hugegraph.util.collection.ObjectIntMapping; +import org.apache.hugegraph.util.collection.ObjectIntMappingFactory; +import org.apache.tinkerpop.gremlin.structure.Edge; + +public class NestedIterator extends WrappedIterator { + + private final int MAX_CACHED_COUNT = 1000; + /** + * Set visited: visited vertex-ids of all parent-tree + * used to exclude visited vertex + */ + private final boolean nearest; + private final Set visited; + private final int MAX_VISITED_COUNT = 100000; + + // cache for edges, initial capacity to avoid memory fragment + private final List cache; + private final Map parentEdgePointerMap; + + private final Iterator parentIterator; + private final HugeTraverser traverser; + private final Steps steps; + private final ObjectIntMapping idMapping; + private HugeEdge currentEdge; + private int cachePointer; + private Iterator currentIterator; + + public 
NestedIterator(HugeTraverser traverser, + Iterator parentIterator, + Steps steps, + Set visited, + boolean nearest) { + this.traverser = traverser; + this.parentIterator = parentIterator; + this.steps = steps; + this.visited = visited; + this.nearest = nearest; + + this.cache = new ArrayList<>(MAX_CACHED_COUNT); + this.parentEdgePointerMap = new HashMap<>(); + + this.cachePointer = 0; + this.currentEdge = null; + this.currentIterator = null; + + this.idMapping = ObjectIntMappingFactory.newObjectIntMapping(false); + } + + private static Long makeVertexPairIndex(int source, int target) { + return ((long) source & 0xFFFFFFFFL) | + (((long) target << 32) & 0xFFFFFFFF00000000L); + } + + @Override + public boolean hasNext() { + if (this.currentIterator == null || !this.currentIterator.hasNext()) { + return fetch(); + } + return true; + } + + @Override + public Edge next() { + return this.currentIterator.next(); + } + + @Override + protected Iterator originIterator() { + return this.parentIterator; + } + + @Override + protected boolean fetch() { + while (this.currentIterator == null || !this.currentIterator.hasNext()) { + if (this.currentIterator != null) { + this.currentIterator = null; + } + + if (this.cache.size() == this.cachePointer && !this.fillCache()) { + return false; + } + + this.currentEdge = this.cache.get(this.cachePointer); + this.cachePointer++; + this.currentIterator = + traverser.edgesOfVertex(this.currentEdge.id().otherVertexId(), steps); + this.traverser.vertexIterCounter.addAndGet(1L); + + } + return true; + } + + private boolean fillCache() { + // fill cache from parent + while (this.parentIterator.hasNext() && this.cache.size() < MAX_CACHED_COUNT) { + HugeEdge edge = (HugeEdge) this.parentIterator.next(); + Id vertexId = edge.id().otherVertexId(); + + this.traverser.edgeIterCounter.addAndGet(1L); + + if (!this.nearest || !this.visited.contains(vertexId)) { + // update parent edge cache pointer + int parentEdgePointer = -1; + if (this.parentIterator 
instanceof NestedIterator) { + parentEdgePointer = ((NestedIterator) this.parentIterator).currentEdgePointer(); + } + + this.parentEdgePointerMap.put(makeEdgeIndex(edge), parentEdgePointer); + + this.cache.add(edge); + if (this.visited.size() < MAX_VISITED_COUNT) { + this.visited.add(vertexId); + } + } + } + return this.cache.size() > this.cachePointer; + } + + public List pathEdges() { + List edges = new ArrayList<>(); + HugeEdge currentEdge = this.currentEdge; + if (this.parentIterator instanceof NestedIterator) { + NestedIterator parent = (NestedIterator) this.parentIterator; + int parentEdgePointer = this.parentEdgePointerMap.get(makeEdgeIndex(currentEdge)); + edges.addAll(parent.pathEdges(parentEdgePointer)); + } + edges.add(currentEdge); + return edges; + } + + private List pathEdges(int edgePointer) { + List edges = new ArrayList<>(); + HugeEdge edge = this.cache.get(edgePointer); + if (this.parentIterator instanceof NestedIterator) { + NestedIterator parent = (NestedIterator) this.parentIterator; + int parentEdgePointer = this.parentEdgePointerMap.get(makeEdgeIndex(edge)); + edges.addAll(parent.pathEdges(parentEdgePointer)); + } + edges.add(edge); + return edges; + } + + public int currentEdgePointer() { + return this.cachePointer - 1; + } + + private Long makeEdgeIndex(HugeEdge edge) { + int sourceV = this.code(edge.id().ownerVertexId()); + int targetV = this.code(edge.id().otherVertexId()); + return makeVertexPairIndex(sourceV, targetV); + } + + private int code(Id id) { + if (id.number()) { + long l = id.asLong(); + if (0 <= l && l <= Integer.MAX_VALUE) { + return (int) l; + } + } + int code = this.idMapping.object2Code(id); + assert code > 0; + return -code; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java index 5953e71e2a..e4264893a5 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KoutRecords.java @@ -23,6 +23,7 @@ import java.util.Stack; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.HugeTraverser.PathSet; import org.apache.hugegraph.traversal.algorithm.records.record.Record; import org.apache.hugegraph.traversal.algorithm.records.record.RecordType; @@ -32,8 +33,23 @@ public class KoutRecords extends SingleWayMultiPathsRecords { - public KoutRecords(boolean concurrent, Id source, boolean nearest) { + // Non-zero depth is used for deepFirst traverse mode. + // In such case, startOneLayer/finishOneLayer should not be called, + // instead, we should use addFullPath + private final int depth; + + public KoutRecords(boolean concurrent, Id source, boolean nearest, int depth) { super(RecordType.INT, concurrent, source, nearest); + + // add depth(num) records to record each layer + this.depth = depth; + for (int i = 0; i < depth; i++) { + this.records().push(this.newRecord()); + } + assert (this.records().size() == (depth + 1)); + + // init top layer's parentRecord + this.currentRecord(this.records().peek(), null); } @Override @@ -61,4 +77,29 @@ public PathSet paths(long limit) { } return paths; } + + public void addFullPath(List edges) { + assert (depth == edges.size()); + + int sourceCode = this.code(edges.get(0).id().ownerVertexId()); + int targetCode; + for (int i = 0; i < edges.size(); i++) { + HugeEdge edge = edges.get(i); + Id sourceV = edge.id().ownerVertexId(); + Id targetV = edge.id().otherVertexId(); + + assert (this.code(sourceV) == sourceCode); + + this.edgeResults().addEdge(sourceV, targetV, edge); + + targetCode = this.code(targetV); + Record record = this.records().elementAt(i + 1); + if (this.sourceCode == targetCode) { + break; 
+ } + + this.addPathToRecord(sourceCode, targetCode, record); + sourceCode = targetCode; + } + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java index d41adc92a8..fad78edf07 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/SingleWayMultiPathsRecords.java @@ -40,9 +40,8 @@ public abstract class SingleWayMultiPathsRecords extends AbstractRecords { + protected final int sourceCode; private final Stack records; - - private final int sourceCode; private final boolean nearest; private final IntSet accessedVertices; private final EdgeRecord edgeResults; @@ -110,15 +109,16 @@ public Iterator keys() { @Watched public void addPath(Id source, Id target) { - int sourceCode = this.code(source); - int targetCode = this.code(target); + this.addPathToRecord(this.code(source), this.code(target), this.currentRecord()); + } + + public void addPathToRecord(int sourceCode, int targetCode, Record record) { if (this.nearest && this.accessedVertices.contains(targetCode) || - !this.nearest && this.currentRecord().containsKey(targetCode) || + !this.nearest && record.containsKey(targetCode) || targetCode == this.sourceCode) { return; } - this.currentRecord().addPath(targetCode, sourceCode); - + record.addPath(targetCode, sourceCode); this.accessedVertices.add(targetCode); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java new file mode 100644 index 0000000000..d1a9238be1 --- /dev/null +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.traversal.algorithm.steps; + +import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.traversal.algorithm.HugeTraverser; +import org.apache.hugegraph.traversal.optimize.TraversalUtil; +import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.E; + +public class Steps { + + protected final Map edgeSteps; + protected final Map vertexSteps; + protected final Directions direction; + protected final long degree; + protected final long skipDegree; + + public Steps(HugeGraph graph, Directions direction, + Map> vSteps, + Map> eSteps, + long degree, long skipDegree) { + E.checkArgument(degree == NO_LIMIT || degree > 0L, + "The max degree must be > 0 or == -1, but 
got: %s", degree); + HugeTraverser.checkSkipDegree(skipDegree, degree, NO_LIMIT); + + this.direction = direction; + + // parse vertex steps + this.vertexSteps = new HashMap<>(); + if (vSteps != null && !vSteps.isEmpty()) { + initVertexFilter(graph, vSteps); + } + + // parse edge steps + this.edgeSteps = new HashMap<>(); + if (eSteps != null && !eSteps.isEmpty()) { + initEdgeFilter(graph, eSteps); + } + + this.degree = degree; + this.skipDegree = skipDegree; + } + + private void initVertexFilter(HugeGraph graph, Map> vSteps) { + for (Map.Entry> entry : vSteps.entrySet()) { + if (checkEntryEmpty(entry)) { + continue; + } + E.checkArgument(entry.getKey() != null && !entry.getKey().isEmpty(), + "The vertex step label could not be null"); + + VertexLabel vertexLabel = graph.vertexLabel(entry.getKey()); + StepEntity stepEntity = handleStepEntity(graph, entry, vertexLabel.id()); + this.vertexSteps.put(vertexLabel.id(), stepEntity); + } + } + + private void initEdgeFilter(HugeGraph graph, Map> eSteps) { + for (Map.Entry> entry : eSteps.entrySet()) { + if (checkEntryEmpty(entry)) { + continue; + } + E.checkArgument(entry.getKey() != null && !entry.getKey().isEmpty(), + "The edge step label could not be null"); + + EdgeLabel edgeLabel = graph.edgeLabel(entry.getKey()); + StepEntity stepEntity = handleStepEntity(graph, entry, edgeLabel.id()); + this.edgeSteps.put(edgeLabel.id(), stepEntity); + } + } + + private StepEntity handleStepEntity(HugeGraph graph, + Map.Entry> entry, + Id id) { + Map properties = null; + if (entry.getValue() != null) { + properties = TraversalUtil.transProperties(graph, entry.getValue()); + } + return new StepEntity(id, entry.getKey(), properties); + } + + private boolean checkEntryEmpty(Map.Entry> entry) { + return (entry.getKey() == null || entry.getKey().isEmpty()) && + (entry.getValue() == null || entry.getValue().isEmpty()); + } + + public long degree() { + return this.degree; + } + + public Map edgeSteps() { + return this.edgeSteps; + } + + 
public Map vertexSteps() { + return this.vertexSteps; + } + + public long skipDegree() { + return this.skipDegree; + } + + public Directions direction() { + return this.direction; + } + + public long limit() { + return this.skipDegree > 0L ? this.skipDegree : this.degree; + } + + public List edgeLabels() { + return new ArrayList<>(this.edgeSteps.keySet()); + } + + public boolean isVertexEmpty() { + return this.vertexSteps.isEmpty(); + } + + @Override + public String toString() { + return "Steps{" + + "edgeSteps=" + this.edgeSteps + + ", vertexSteps=" + this.vertexSteps + + ", direction=" + this.direction + + ", degree=" + this.degree + + ", skipDegree=" + this.skipDegree + + '}'; + } + + public static class StepEntity { + + protected final Id id; + protected final String label; + protected final Map properties; + + public StepEntity(Id id, String label, Map properties) { + this.id = id; + this.label = label; + this.properties = properties; + } + + public Id id() { + return this.id; + } + + public String label() { + return this.label; + } + + public Map properties() { + return this.properties; + } + + @Override + public String toString() { + return String.format("StepEntity{id=%s,label=%s," + + "properties=%s}", this.id, + this.label, this.properties); + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java index 77de342583..99f45fc1cd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/TraversalUtil.java @@ -39,11 +39,18 @@ import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; +import 
org.apache.hugegraph.exception.NotSupportException; +import org.apache.hugegraph.iterator.FilterIterator; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.SchemaLabel; +import org.apache.hugegraph.structure.HugeElement; +import org.apache.hugegraph.structure.HugeProperty; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.CollectionUtil; +import org.apache.hugegraph.util.DateUtil; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.JsonUtil; import org.apache.tinkerpop.gremlin.process.traversal.Compare; import org.apache.tinkerpop.gremlin.process.traversal.Contains; @@ -83,13 +90,6 @@ import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; -import org.apache.hugegraph.exception.NotSupportException; -import org.apache.hugegraph.iterator.FilterIterator; -import org.apache.hugegraph.structure.HugeElement; -import org.apache.hugegraph.structure.HugeProperty; -import org.apache.hugegraph.util.CollectionUtil; -import org.apache.hugegraph.util.DateUtil; -import org.apache.hugegraph.util.E; import com.google.common.collect.ImmutableList; public final class TraversalUtil { @@ -146,7 +146,8 @@ public static void trySetGraph(Step step, HugeGraph graph) { } local.setGraph(graph); } - for (final Traversal.Admin global : ((TraversalParent) step).getGlobalChildren()) { + for (final Traversal.Admin global : + ((TraversalParent) step).getGlobalChildren()) { if (global.getGraph().filter(g -> !(g instanceof EmptyGraph)).isPresent()) { continue; } @@ -690,7 +691,7 @@ private static boolean isSysProp(String key) { return token2HugeKey(key) != null; } - @SuppressWarnings({ "unchecked", "rawtypes" }) + @SuppressWarnings({"unchecked", "rawtypes"}) private static void collectPredicates(List> results, List> predicates) { for (P p : predicates) { @@ 
-781,7 +782,7 @@ private static V validPropertyValue(V value, PropertyKey pkey) { public static void retrieveSysprop(List hasContainers, Function func) { - for (Iterator iter = hasContainers.iterator(); iter.hasNext();) { + for (Iterator iter = hasContainers.iterator(); iter.hasNext(); ) { HasContainer container = iter.next(); if (container.getKey().startsWith("~") && func.apply(container)) { iter.remove(); @@ -842,7 +843,7 @@ public static boolean testProperty(Property prop, Object expected) { public static Map transProperties(HugeGraph graph, Map props) { Map pks = new HashMap<>(props.size()); - for (Map.Entry e: props.entrySet()) { + for (Map.Entry e : props.entrySet()) { PropertyKey pk = graph.propertyKey(e.getKey()); pks.put(pk.id(), e.getValue()); } diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KneighborApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KneighborApiTest.java index f207b59b22..e415fa568f 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KneighborApiTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KneighborApiTest.java @@ -20,15 +20,16 @@ import java.util.List; import java.util.Map; -import jakarta.ws.rs.core.Response; +import org.apache.hugegraph.api.BaseApiTest; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.apache.hugegraph.api.BaseApiTest; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import jakarta.ws.rs.core.Response; + public class KneighborApiTest extends BaseApiTest { static final String PATH = TRAVERSERS_API + "/kneighbor"; @@ -64,13 +65,18 @@ public void testPost() { String markoId = name2Ids.get("marko"); String reqBody = String.format("{ " + "\"source\": \"%s\", " + - "\"step\": { " + + "\"steps\": { " + " \"direction\": \"BOTH\", " + - " \"labels\": 
[\"knows\", " + - " \"created\"], " + - "\"properties\": { " + - " \"weight\": \"P.gt(0.1)\"}, " + - " \"degree\": 10000, " + + " \"edge_steps\": [" + + " {\"label\": \"knows\"," + + " \"properties\": {" + + " \"weight\": \"P.gt(0.1)\"}}," + + " {\"label\": \"created\"," + + " \"properties\": {" + + " \"weight\": \"P.gt(0.1)\"}}" + + " ], " + + " \"vertex_steps\": []," + + " \"max_degree\": 10000, " + " \"skip_degree\": 100000}, " + "\"max_depth\": 3, " + "\"limit\": 10000, " + diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KoutApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KoutApiTest.java index 2fb8a6466b..11544e3afc 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KoutApiTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/traversers/KoutApiTest.java @@ -20,15 +20,16 @@ import java.util.List; import java.util.Map; -import jakarta.ws.rs.core.Response; +import org.apache.hugegraph.api.BaseApiTest; import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import org.apache.hugegraph.api.BaseApiTest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import jakarta.ws.rs.core.Response; + public class KoutApiTest extends BaseApiTest { static final String PATH = TRAVERSERS_API + "/kout"; @@ -75,13 +76,18 @@ public void testPost() { String markoId = name2Ids.get("marko"); String reqBody = String.format("{ " + "\"source\": \"%s\", " + - "\"step\": { " + + "\"steps\": { " + " \"direction\": \"BOTH\", " + - " \"labels\": [\"knows\", " + - " \"created\"], " + - "\"properties\": { " + - " \"weight\": \"P.gt(0.1)\"}, " + - " \"degree\": 10000, " + + " \"edge_steps\": [" + + " {\"label\": \"knows\"," + + " \"properties\": {" + + " \"weight\": \"P.gt(0.1)\"}}," + + " {\"label\": \"created\"," + + " \"properties\": {" + + " \"weight\": 
\"P.gt(0.1)\"}}" + + " ], " + + " \"vertex_steps\": []," + + " \"max_degree\": 10000, " + " \"skip_degree\": 100000}, " + "\"max_depth\": 1, " + "\"nearest\": true, " + From 20f971fcda37777f3d978239639114def17cf373 Mon Sep 17 00:00:00 2001 From: M <87920097+msgui@users.noreply.github.com> Date: Mon, 11 Sep 2023 10:48:37 +0800 Subject: [PATCH 08/24] fix: base-ref/head-ref missed in dependency-review on master (#2308) --- .github/workflows/check-dependencies.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/check-dependencies.yml b/.github/workflows/check-dependencies.yml index fbc8633a59..b994fc1430 100644 --- a/.github/workflows/check-dependencies.yml +++ b/.github/workflows/check-dependencies.yml @@ -3,7 +3,6 @@ name: "3rd-party" on: push: branches: - - master - /^release-.*$/ pull_request: From c9fdf782da7f7fc6c2a092f4cc17c4d6b6f3620f Mon Sep 17 00:00:00 2001 From: Dandelion <49650772+aroundabout@users.noreply.github.com> Date: Mon, 2 Oct 2023 13:14:01 +0800 Subject: [PATCH 09/24] doc: update README about start server with example graph (#2315) --- README.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index edc7c97bba..d50235bbec 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,10 @@ Billions of vertices and edges can be easily stored into and queried from HugeGr We can use `docker run -itd --name=graph -p 8080:8080 hugegraph/hugegraph` to quickly start an inner HugeGraph server with `RocksDB` in background. -Optional: use `docker exec -it graph bash` to enter the container to do some operations. +Optional: + +1. use `docker exec -it graph bash` to enter the container to do some operations. +2. use `docker run -itd --name=graph -p 8080:8080 -e PRELOAD="true" hugegraph/hugegraph` to start with a **built-in** (example) graph. ### 2. Download Way @@ -54,7 +57,7 @@ The project [doc page](https://hugegraph.apache.org/docs/) contains more informa and provides detailed documentation for users. 
(Structure / Usage / API / Configs...) And here are links of other **HugeGraph** component/repositories: -1. [hugegraph-toolchain](https://github.com/apache/incubator-hugegraph-toolchain) (graph **loader/dashboard/tool/client**) +1. [hugegraph-toolchain](https://github.com/apache/incubator-hugegraph-toolchain) (graph **[loader](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-loader)/[dashboard](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-hubble)/[tool](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-tools)/[client](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-client)**) 2. [hugegraph-computer](https://github.com/apache/incubator-hugegraph-computer) (matched **graph computing** system) 3. [hugegraph-commons](https://github.com/apache/incubator-hugegraph-commons) (**common & rpc** module) 4. [hugegraph-website](https://github.com/apache/incubator-hugegraph-doc) (**doc & website** code) From fd3c234cabf2eea1caa86010022b37530954caf4 Mon Sep 17 00:00:00 2001 From: SunnyBoy-WYH <48077841+SunnyBoy-WYH@users.noreply.github.com> Date: Mon, 2 Oct 2023 13:25:45 +0800 Subject: [PATCH 10/24] feat: support White IP List (#2299) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Tips: - This feature works only when auth mode is set. - This feature works only when the white IP status is enabled. Because PD is currently unavailable, we just use a Java list; once PD is ready, we can switch to PD.
--- .../api/filter/AuthenticationFilter.java | 69 ++++++-- .../hugegraph/api/profile/WhiteIpListAPI.java | 155 ++++++++++++++++++ .../hugegraph/auth/HugeGraphAuthProxy.java | 20 +++ .../hugegraph/config/ServerOptions.java | 10 +- .../apache/hugegraph/auth/AuthManager.java | 8 + .../hugegraph/auth/StandardAuthManager.java | 45 ++++- 6 files changed, 280 insertions(+), 27 deletions(-) create mode 100644 hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AuthenticationFilter.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AuthenticationFilter.java index f534a0ac9a..464e695fef 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AuthenticationFilter.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AuthenticationFilter.java @@ -17,14 +17,38 @@ package org.apache.hugegraph.api.filter; +import static org.apache.hugegraph.config.ServerOptions.WHITE_IP_STATUS; + import java.io.IOException; import java.security.Principal; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.Set; + +import javax.xml.bind.DatatypeConverter; + +import org.apache.hugegraph.auth.HugeAuthenticator; +import org.apache.hugegraph.auth.HugeAuthenticator.RequiredPerm; +import org.apache.hugegraph.auth.HugeAuthenticator.RolePerm; +import org.apache.hugegraph.auth.HugeAuthenticator.User; +import org.apache.hugegraph.auth.RolePermission; +import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.core.GraphManager; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Log; +import org.apache.tinkerpop.gremlin.server.auth.AuthenticationException; +import org.glassfish.grizzly.http.server.Request; +import org.glassfish.grizzly.utils.Charsets; +import org.slf4j.Logger; 
+ +import com.alipay.remoting.util.StringUtils; +import com.google.common.collect.ImmutableList; import jakarta.annotation.Priority; import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.ForbiddenException; import jakarta.ws.rs.NotAuthorizedException; import jakarta.ws.rs.Priorities; import jakarta.ws.rs.container.ContainerRequestContext; @@ -35,23 +59,6 @@ import jakarta.ws.rs.core.SecurityContext; import jakarta.ws.rs.core.UriInfo; import jakarta.ws.rs.ext.Provider; -import javax.xml.bind.DatatypeConverter; - -import org.apache.commons.lang3.StringUtils; -import org.apache.tinkerpop.gremlin.server.auth.AuthenticationException; -import org.glassfish.grizzly.http.server.Request; -import org.glassfish.grizzly.utils.Charsets; -import org.slf4j.Logger; - -import org.apache.hugegraph.auth.HugeAuthenticator; -import org.apache.hugegraph.auth.HugeAuthenticator.RequiredPerm; -import org.apache.hugegraph.auth.HugeAuthenticator.RolePerm; -import org.apache.hugegraph.auth.HugeAuthenticator.User; -import org.apache.hugegraph.auth.RolePermission; -import org.apache.hugegraph.core.GraphManager; -import org.apache.hugegraph.util.E; -import org.apache.hugegraph.util.Log; -import com.google.common.collect.ImmutableList; @Provider @PreMatching @@ -68,12 +75,20 @@ public class AuthenticationFilter implements ContainerRequestFilter { "versions" ); + private static String whiteIpStatus; + + private static final String STRING_WHITE_IP_LIST = "whiteiplist"; + private static final String STRING_ENABLE = "enable"; + @Context private jakarta.inject.Provider managerProvider; @Context private jakarta.inject.Provider requestProvider; + @Context + private jakarta.inject.Provider configProvider; + @Override public void filter(ContainerRequestContext context) throws IOException { if (AuthenticationFilter.isWhiteAPI(context)) { @@ -102,6 +117,26 @@ protected User authenticate(ContainerRequestContext context) { path = request.getRequestURI(); } + // Check whiteIp + if (whiteIpStatus == 
null) { + whiteIpStatus = this.configProvider.get().get(WHITE_IP_STATUS); + } + + if (Objects.equals(whiteIpStatus, STRING_ENABLE) && request != null) { + peer = request.getRemoteAddr() + ":" + request.getRemotePort(); + path = request.getRequestURI(); + + String remoteIp = request.getRemoteAddr(); + Set whiteIpList = manager.authManager().listWhiteIPs(); + boolean whiteIpEnabled = manager.authManager().getWhiteIpStatus(); + if (!path.contains(STRING_WHITE_IP_LIST) && whiteIpEnabled && + !whiteIpList.contains(remoteIp)) { + throw new ForbiddenException( + String.format("Remote ip '%s' is not permitted", + remoteIp)); + } + } + Map credentials = new HashMap<>(); // Extract authentication credentials String auth = context.getHeaderString(HttpHeaders.AUTHORIZATION); diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java new file mode 100644 index 0000000000..7503e13822 --- /dev/null +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.api.profile; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.commons.lang.StringUtils; +import org.apache.hugegraph.api.API; +import org.apache.hugegraph.api.filter.StatusFilter; +import org.apache.hugegraph.auth.AuthManager; +import org.apache.hugegraph.core.GraphManager; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import com.codahale.metrics.annotation.Timed; +import com.google.common.collect.ImmutableMap; + +import jakarta.annotation.security.RolesAllowed; +import jakarta.inject.Singleton; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + +@Path("whiteiplist") +@Singleton +public class WhiteIpListAPI extends API { + + private static final Logger LOG = Log.logger(WhiteIpListAPI.class); + + @GET + @Timed + @Produces(APPLICATION_JSON_WITH_CHARSET) + @RolesAllowed("admin") + public Map list(@Context GraphManager manager) { + LOG.debug("List white ips"); + AuthManager authManager = manager.authManager(); + Set whiteIpList = authManager.listWhiteIPs(); + return ImmutableMap.of("whiteIpList", whiteIpList); + } + + @POST + @Timed + @StatusFilter.Status(StatusFilter.Status.ACCEPTED) + @Consumes(APPLICATION_JSON) + @Produces(APPLICATION_JSON_WITH_CHARSET) + @RolesAllowed("admin") + public Map updateWhiteIPs(@Context GraphManager manager, Map actionMap) { + E.checkArgument(actionMap != null, + "Missing argument: actionMap"); + Set whiteIpList = manager.authManager().listWhiteIPs(); + Object ipListRaw = actionMap.get("ips"); + E.checkArgument(ipListRaw instanceof List, + "Invalid ips type '%s', must be list", 
ipListRaw.getClass()); + List ipList = (List) ipListRaw; + Object actionRaw = actionMap.get("action"); + E.checkArgument(actionRaw != null, + "Missing argument: action"); + E.checkArgument(actionRaw instanceof String, + "Invalid action type '%s', must be string", + actionRaw.getClass()); + String action = (String) actionRaw; + E.checkArgument(StringUtils.isNotEmpty(action), + "Missing argument: action"); + Set existedIPs = new HashSet<>(); + Set loadedIPs = new HashSet<>(); + Set illegalIPs = new HashSet<>(); + Map result = new HashMap<>(); + for (String ip : ipList) { + if (whiteIpList.contains(ip)) { + existedIPs.add(ip); + continue; + } + if ("load".equals(action)) { + boolean rightIp = checkIp(ip) ? loadedIPs.add(ip) : illegalIPs.add(ip); + } + } + switch (action) { + case "load": + LOG.debug("Load to white ip list"); + result.put("existed_ips", existedIPs); + result.put("added_ips", loadedIPs); + if (!illegalIPs.isEmpty()) { + result.put("illegal_ips", illegalIPs); + } + whiteIpList.addAll(loadedIPs); + break; + case "remove": + LOG.debug("Remove from white ip list"); + result.put("removed_ips", existedIPs); + result.put("non_existed_ips", loadedIPs); + whiteIpList.removeAll(existedIPs); + break; + default: + throw new AssertionError(String.format("Invalid action '%s', " + + "supported action is " + + "'load' or 'remove'", + action)); + } + manager.authManager().setWhiteIPs(whiteIpList); + return result; + } + + @PUT + @Timed + @Produces(APPLICATION_JSON_WITH_CHARSET) + @RolesAllowed("admin") + public Map updateStatus(@Context GraphManager manager, @QueryParam("status") String status) { + LOG.debug("Enable or disable white ip list"); + E.checkArgument("true".equals(status) || + "false".equals(status), + "Invalid status, valid status is 'true' or 'false'"); + boolean open = Boolean.parseBoolean(status); + manager.authManager().enabledWhiteIpList(open); + Map map = new HashMap<>(); + map.put("WhiteIpListOpen", open); + return map; + } + + private boolean 
checkIp(String ipStr) { + String ip = "^(1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|[1-9])\\." + + "(00?\\d|1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|\\d)\\." + + "(00?\\d|1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|\\d)\\." + + "(00?\\d|1\\d{2}|2[0-4]\\d|25[0-5]|[1-9]\\d|\\d)$"; + Pattern pattern = Pattern.compile(ip); + Matcher matcher = pattern.matcher(ipStr); + return matcher.matches(); + } +} diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java index 96841dbe69..83303ad51d 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/auth/HugeGraphAuthProxy.java @@ -1600,6 +1600,26 @@ public UserWithRole validateUser(String token) { } } + @Override + public Set listWhiteIPs() { + return this.authManager.listWhiteIPs(); + } + + @Override + public void setWhiteIPs(Set whiteIpList) { + this.authManager.setWhiteIPs(whiteIpList); + } + + @Override + public boolean getWhiteIpStatus() { + return this.authManager.getWhiteIpStatus(); + } + + @Override + public void enabledWhiteIpList(boolean status) { + this.authManager.enabledWhiteIpList(status); + } + @Override public String loginUser(String username, String password) { try { diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java index e66b593568..6e41ae87c0 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java @@ -264,4 +264,12 @@ public static synchronized ServerOptions instance() { disallowEmpty(), true ); -} \ No newline at end of file + + public static final ConfigOption 
WHITE_IP_STATUS = + new ConfigOption<>( + "white_ip.status", + "The status of whether enable white ip.", + disallowEmpty(), + "disable" + ); +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java index 2dba7c7a15..908eed01f1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/AuthManager.java @@ -126,4 +126,12 @@ public interface AuthManager { UserWithRole validateUser(String username, String password); UserWithRole validateUser(String token); + + Set listWhiteIPs(); + + void setWhiteIPs(Set whiteIpList); + + boolean getWhiteIpStatus(); + + void enabledWhiteIpList(boolean status); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java index 910f19cdc5..123c8e9ffd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/StandardAuthManager.java @@ -27,31 +27,30 @@ import javax.security.sasl.AuthenticationException; -import jakarta.ws.rs.ForbiddenException; - import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.HugeUser.P; +import org.apache.hugegraph.auth.SchemaDefine.AuthElement; import org.apache.hugegraph.backend.cache.Cache; import org.apache.hugegraph.backend.cache.CacheManager; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.config.AuthOptions; +import 
org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.type.define.Directions; +import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.LockUtil; +import org.apache.hugegraph.util.Log; import org.apache.hugegraph.util.StringEncoding; import org.slf4j.Logger; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.auth.HugeUser.P; -import org.apache.hugegraph.auth.SchemaDefine.AuthElement; -import org.apache.hugegraph.config.HugeConfig; -import org.apache.hugegraph.util.E; -import org.apache.hugegraph.util.Log; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.jsonwebtoken.Claims; +import jakarta.ws.rs.ForbiddenException; public class StandardAuthManager implements AuthManager { @@ -77,6 +76,10 @@ public class StandardAuthManager implements AuthManager { private final TokenGenerator tokenGenerator; private final long tokenExpire; + private Set ipWhiteList; + + private Boolean ipWhiteListEnabled; + public StandardAuthManager(HugeGraphParams graph) { E.checkNotNull(graph, "graph"); HugeConfig config = graph.configuration(); @@ -104,6 +107,10 @@ public StandardAuthManager(HugeGraphParams graph) { HugeAccess::fromEdge); this.tokenGenerator = new TokenGenerator(config); + + this.ipWhiteList = new HashSet<>(); + + this.ipWhiteListEnabled = false; } private Cache cache(String prefix, long capacity, @@ -689,6 +696,26 @@ public UserWithRole validateUser(String token) { return new UserWithRole(user.id(), username, this.rolePermission(user)); } + @Override + public Set listWhiteIPs() { + return ipWhiteList; + } + + @Override + public void setWhiteIPs(Set ipWhiteList) { + this.ipWhiteList = ipWhiteList; + } + + @Override + public boolean getWhiteIpStatus() { + return this.ipWhiteListEnabled; + } + + @Override + public void enabledWhiteIpList(boolean status) { + this.ipWhiteListEnabled = status; + } + /** * Maybe can define an proxy 
class to choose forward or call local */ From b05215e82ba8727cffd9eb5bd36324b18fe163ee Mon Sep 17 00:00:00 2001 From: SunnyBoy-WYH <48077841+SunnyBoy-WYH@users.noreply.github.com> Date: Sun, 8 Oct 2023 20:21:09 +0800 Subject: [PATCH 11/24] feat(api): support metric API Prometheus format & add statistic metric api (#2286) --- .../java/org/apache/hugegraph/api/API.java | 3 +- .../hugegraph/api/filter/AccessLogFilter.java | 82 +++++ .../hugegraph/api/filter/PathFilter.java | 40 +++ .../hugegraph/api/metrics/MetricsAPI.java | 316 ++++++++++++++++-- .../apache/hugegraph/metrics/MetricsKeys.java | 40 +++ .../apache/hugegraph/metrics/MetricsUtil.java | 165 ++++++++- .../apache/hugegraph/api/MetricsApiTest.java | 34 +- 7 files changed, 643 insertions(+), 37 deletions(-) create mode 100644 hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java create mode 100644 hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java create mode 100644 hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsKeys.java diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java index 99fe67e5ba..e57f6739df 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/API.java @@ -27,6 +27,7 @@ import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.core.GraphManager; import org.apache.hugegraph.define.Checkable; +import org.apache.hugegraph.exception.NotFoundException; import org.apache.hugegraph.metrics.MetricsUtil; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.InsertionOrderUtil; @@ -38,7 +39,6 @@ import com.google.common.collect.ImmutableMap; import jakarta.ws.rs.ForbiddenException; -import jakarta.ws.rs.NotFoundException; import 
jakarta.ws.rs.NotSupportedException; import jakarta.ws.rs.core.MediaType; @@ -49,6 +49,7 @@ public class API { public static final String APPLICATION_JSON = MediaType.APPLICATION_JSON; public static final String APPLICATION_JSON_WITH_CHARSET = APPLICATION_JSON + ";charset=" + CHARSET; + public static final String APPLICATION_TEXT_WITH_CHARSET = MediaType.TEXT_PLAIN + ";charset=" + CHARSET; public static final String JSON = MediaType.APPLICATION_JSON_TYPE .getSubtype(); public static final String ACTION_APPEND = "append"; diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java new file mode 100644 index 0000000000..ba9c981186 --- /dev/null +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.api.filter; + +import static org.apache.hugegraph.api.filter.PathFilter.REQUEST_TIME; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_FAILED_COUNTER; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_RESPONSE_TIME_HISTOGRAM; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_SUCCESS_COUNTER; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_TOTAL_COUNTER; + +import java.io.IOException; + +import org.apache.hugegraph.metrics.MetricsUtil; + +import jakarta.inject.Singleton; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerResponseContext; +import jakarta.ws.rs.container.ContainerResponseFilter; +import jakarta.ws.rs.ext.Provider; + + +@Provider +@Singleton +public class AccessLogFilter implements ContainerResponseFilter { + + private static final String DELIMETER = "/"; + + /** + * Use filter to log request info + * + * @param requestContext requestContext + * @param responseContext responseContext + */ + @Override + public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) throws IOException { + // Grab corresponding request / response info from context; + String method = requestContext.getRequest().getMethod(); + String path = requestContext.getUriInfo().getPath(); + String metricsName = join(path, method); + + MetricsUtil.registerCounter(join(metricsName, METRICS_PATH_TOTAL_COUNTER)).inc(); + if (statusOk(responseContext.getStatus())) { + MetricsUtil.registerCounter(join(metricsName, METRICS_PATH_SUCCESS_COUNTER)).inc(); + } else { + MetricsUtil.registerCounter(join(metricsName, METRICS_PATH_FAILED_COUNTER)).inc(); + } + + // get responseTime + Object requestTime = requestContext.getProperty(REQUEST_TIME); + if(requestTime!=null){ + long now = System.currentTimeMillis(); + long responseTime = (now - (long)requestTime); + + MetricsUtil.registerHistogram( + 
join(metricsName, METRICS_PATH_RESPONSE_TIME_HISTOGRAM)) + .update(responseTime); + } + } + + private String join(String path1, String path2) { + return String.join(DELIMETER, path1, path2); + } + + private boolean statusOk(int status){ + return status == 200 || status == 201 || status == 202; + } +} diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java new file mode 100644 index 0000000000..3414d6831b --- /dev/null +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.api.filter; + +import java.io.IOException; + +import jakarta.inject.Singleton; +import jakarta.ws.rs.container.ContainerRequestContext; +import jakarta.ws.rs.container.ContainerRequestFilter; +import jakarta.ws.rs.container.PreMatching; +import jakarta.ws.rs.ext.Provider; + +@Provider +@Singleton +@PreMatching +public class PathFilter implements ContainerRequestFilter { + + public static final String REQUEST_TIME = "request_time"; + + @Override + public void filter(ContainerRequestContext context) + throws IOException { + context.setProperty(REQUEST_TIME, System.currentTimeMillis()); + } +} diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java index 6df4f6453b..f74286b5f8 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java @@ -19,33 +19,66 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; +import static org.apache.hugegraph.metrics.MetricsUtil.COUNT_ATTR; +import static org.apache.hugegraph.metrics.MetricsUtil.END_LSTR; +import static org.apache.hugegraph.metrics.MetricsUtil.FIFT_MIN_RATE_ATRR; +import static org.apache.hugegraph.metrics.MetricsUtil.FIVE_MIN_RATE_ATRR; +import static org.apache.hugegraph.metrics.MetricsUtil.GAUGE_TYPE; +import static org.apache.hugegraph.metrics.MetricsUtil.HISTOGRAM_TYPE; +import static org.apache.hugegraph.metrics.MetricsUtil.LEFT_NAME_STR; +import static org.apache.hugegraph.metrics.MetricsUtil.MEAN_RATE_ATRR; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_FAILED_COUNTER; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_RESPONSE_TIME_HISTOGRAM; +import static 
org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_SUCCESS_COUNTER; +import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_TOTAL_COUNTER; +import static org.apache.hugegraph.metrics.MetricsUtil.ONE_MIN_RATE_ATRR; +import static org.apache.hugegraph.metrics.MetricsUtil.PROM_HELP_NAME; +import static org.apache.hugegraph.metrics.MetricsUtil.RIGHT_NAME_STR; +import static org.apache.hugegraph.metrics.MetricsUtil.SPACE_STR; +import static org.apache.hugegraph.metrics.MetricsUtil.STR_HELP; +import static org.apache.hugegraph.metrics.MetricsUtil.STR_TYPE; +import static org.apache.hugegraph.metrics.MetricsUtil.UNTYPED; +import static org.apache.hugegraph.metrics.MetricsUtil.VERSION_STR; +import static org.apache.hugegraph.metrics.MetricsUtil.exportSnapshot; +import static org.apache.hugegraph.metrics.MetricsUtil.replaceDotDashInKey; +import static org.apache.hugegraph.metrics.MetricsUtil.replaceSlashInKey; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.annotation.security.RolesAllowed; -import jakarta.inject.Singleton; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.core.Context; - +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.api.API; +import org.apache.hugegraph.backend.store.BackendMetrics; import org.apache.hugegraph.core.GraphManager; +import org.apache.hugegraph.metrics.MetricsKeys; import org.apache.hugegraph.metrics.MetricsModule; +import org.apache.hugegraph.metrics.MetricsUtil; import org.apache.hugegraph.metrics.ServerReporter; import org.apache.hugegraph.metrics.SystemMetrics; -import org.slf4j.Logger; - -import org.apache.hugegraph.HugeGraph; -import org.apache.hugegraph.api.API; -import org.apache.hugegraph.backend.store.BackendMetrics; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.JsonUtil; import 
org.apache.hugegraph.util.Log; +import org.apache.hugegraph.version.ApiVersion; +import org.apache.tinkerpop.gremlin.server.util.MetricManager; +import org.slf4j.Logger; + +import com.codahale.metrics.Counter; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Histogram; +import com.codahale.metrics.Meter; import com.codahale.metrics.Metric; import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.annotation.security.RolesAllowed; +import jakarta.inject.Singleton; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.core.Context; + @Singleton @Path("metrics") @Tag(name = "MetricsAPI") @@ -53,12 +86,14 @@ public class MetricsAPI extends API { private static final Logger LOG = Log.logger(MetricsAPI.class); - private SystemMetrics systemMetrics; + private static final String JSON_STR = "json"; static { JsonUtil.registerModule(new MetricsModule(SECONDS, MILLISECONDS, false)); } + private final SystemMetrics systemMetrics; + public MetricsAPI() { this.systemMetrics = new SystemMetrics(); } @@ -94,21 +129,6 @@ public String backend(@Context GraphManager manager) { return JsonUtil.toJson(results); } - @GET - @Timed - @Produces(APPLICATION_JSON_WITH_CHARSET) - @RolesAllowed({"admin", "$owner= $action=metrics_read"}) - public String all() { - ServerReporter reporter = ServerReporter.instance(); - Map> result = new LinkedHashMap<>(); - result.put("gauges", reporter.gauges()); - result.put("counters", reporter.counters()); - result.put("histograms", reporter.histograms()); - result.put("meters", reporter.meters()); - result.put("timers", reporter.timers()); - return JsonUtil.toJson(result); - } - @GET @Timed @Path("gauges") @@ -158,4 +178,242 @@ public String timers() { ServerReporter reporter = ServerReporter.instance(); return JsonUtil.toJson(reporter.timers()); } + + @GET + @Timed + 
@Produces(APPLICATION_TEXT_WITH_CHARSET) + @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + public String all(@Context GraphManager manager, + @QueryParam("type") String type) { + if (type != null && type.equals(JSON_STR)) { + return baseMetricAll(); + } else { + return baseMetricPrometheusAll(); + } + } + + @GET + @Path("statistics") + @Timed + @Produces(APPLICATION_TEXT_WITH_CHARSET) + @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + public String statistics(@QueryParam("type") String type) { + Map> metricMap = statistics(); + + if (type != null && type.equals(JSON_STR)) { + return JsonUtil.toJson(metricMap); + } + return statisticsProm(metricMap); + } + + public String baseMetricAll() { + ServerReporter reporter = ServerReporter.instance(); + Map> result = new LinkedHashMap<>(); + result.put("gauges", reporter.gauges()); + result.put("counters", reporter.counters()); + result.put("histograms", reporter.histograms()); + result.put("meters", reporter.meters()); + result.put("timers", reporter.timers()); + return JsonUtil.toJson(result); + } + + private String baseMetricPrometheusAll() { + StringBuilder promMetric = new StringBuilder(); + ServerReporter reporter = ServerReporter.instance(); + String helpName = PROM_HELP_NAME; + // build version info + promMetric.append(STR_HELP) + .append(helpName).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + UNTYPED + END_LSTR); + promMetric.append(helpName) + .append(VERSION_STR) + .append(ApiVersion.VERSION.toString()).append("\",}") + .append(SPACE_STR + "1.0" + END_LSTR); + + // build gauges metric info + for (String key : reporter.gauges().keySet()) { + final Gauge gauge + = reporter.gauges().get(key); + if (gauge != null) { + helpName = replaceDotDashInKey(key); + promMetric.append(STR_HELP) + .append(helpName).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(helpName).append(SPACE_STR + GAUGE_TYPE + END_LSTR); + promMetric.append(helpName) + 
.append(SPACE_STR + gauge.getValue() + END_LSTR); + } + } + + // build histograms metric info + for (String histogramkey : reporter.histograms().keySet()) { + final Histogram histogram = reporter.histograms().get(histogramkey); + if (histogram != null) { + helpName = replaceDotDashInKey(histogramkey); + promMetric.append(STR_HELP) + .append(helpName).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + HISTOGRAM_TYPE + END_LSTR); + + promMetric.append(helpName) + .append(COUNT_ATTR) + .append(histogram.getCount() + END_LSTR); + promMetric.append( + exportSnapshot(helpName, histogram.getSnapshot())); + } + } + + // build meters metric info + for (String meterkey : reporter.meters().keySet()) { + final Meter metric = reporter.meters().get(meterkey); + if (metric != null) { + helpName = replaceDotDashInKey(meterkey); + promMetric.append(STR_HELP) + .append(helpName).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + HISTOGRAM_TYPE + END_LSTR); + + promMetric.append(helpName) + .append(COUNT_ATTR) + .append(metric.getCount() + END_LSTR); + promMetric.append(helpName) + .append(MEAN_RATE_ATRR) + .append(metric.getMeanRate() + END_LSTR); + promMetric.append(helpName) + .append(ONE_MIN_RATE_ATRR) + .append(metric.getOneMinuteRate() + END_LSTR); + promMetric.append(helpName) + .append(FIVE_MIN_RATE_ATRR) + .append(metric.getFiveMinuteRate() + END_LSTR); + promMetric.append(helpName) + .append(FIFT_MIN_RATE_ATRR) + .append(metric.getFifteenMinuteRate() + END_LSTR); + } + } + + // build timer metric info + for (String timerkey : reporter.timers().keySet()) { + final com.codahale.metrics.Timer timer = reporter.timers() + .get(timerkey); + if (timer != null) { + helpName = replaceDotDashInKey(timerkey); + promMetric.append(STR_HELP) + .append(helpName).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + HISTOGRAM_TYPE + END_LSTR); + + promMetric.append(helpName) + 
.append(COUNT_ATTR) + .append(timer.getCount() + END_LSTR); + promMetric.append(helpName) + .append(ONE_MIN_RATE_ATRR) + .append(timer.getOneMinuteRate() + END_LSTR); + promMetric.append(helpName) + .append(FIVE_MIN_RATE_ATRR) + .append(timer.getFiveMinuteRate() + END_LSTR); + promMetric.append(helpName) + .append(FIFT_MIN_RATE_ATRR) + .append(timer.getFifteenMinuteRate() + END_LSTR); + promMetric.append( + exportSnapshot(helpName, timer.getSnapshot())); + } + } + + MetricsUtil.writePrometheusFormat(promMetric, MetricManager.INSTANCE.getRegistry()); + + return promMetric.toString(); + } + + private Map> statistics() { + Map> metricsMap = new HashMap<>(); + ServerReporter reporter = ServerReporter.instance(); + for (Map.Entry entry : reporter.histograms().entrySet()) { + // entryKey = path/method/responseTimeHistogram + String entryKey = entry.getKey(); + String[] split = entryKey.split("/"); + String lastWord = split[split.length - 1]; + if (!lastWord.equals(METRICS_PATH_RESPONSE_TIME_HISTOGRAM)) { + // original metrics dont report + continue; + } + // metricsName = path/method + String metricsName = + entryKey.substring(0, entryKey.length() - lastWord.length() - 1); + + Counter totalCounter = reporter.counters().get( + joinWithSlash(metricsName, METRICS_PATH_TOTAL_COUNTER)); + Counter failedCounter = reporter.counters().get( + joinWithSlash(metricsName, METRICS_PATH_FAILED_COUNTER)); + Counter successCounter = reporter.counters().get( + joinWithSlash(metricsName, METRICS_PATH_SUCCESS_COUNTER)); + + + Histogram histogram = entry.getValue(); + Map entryMetricsMap = new HashMap<>(); + entryMetricsMap.put(MetricsKeys.MAX_RESPONSE_TIME.name(), + histogram.getSnapshot().getMax()); + entryMetricsMap.put(MetricsKeys.MEAN_RESPONSE_TIME.name(), + histogram.getSnapshot().getMean()); + + entryMetricsMap.put(MetricsKeys.TOTAL_REQUEST.name(), + totalCounter.getCount()); + + if (failedCounter == null) { + entryMetricsMap.put(MetricsKeys.FAILED_REQUEST.name(), 0); + } else { + 
entryMetricsMap.put(MetricsKeys.FAILED_REQUEST.name(), + failedCounter.getCount()); + } + + if (successCounter == null) { + entryMetricsMap.put(MetricsKeys.SUCCESS_REQUEST.name(), 0); + } else { + entryMetricsMap.put(MetricsKeys.SUCCESS_REQUEST.name(), + successCounter.getCount()); + } + + metricsMap.put(metricsName, entryMetricsMap); + + } + return metricsMap; + } + + private String statisticsProm(Map> metricMap) { + StringBuilder promMetric = new StringBuilder(); + + // build version info + promMetric.append(STR_HELP) + .append(PROM_HELP_NAME).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(PROM_HELP_NAME) + .append(SPACE_STR + UNTYPED + END_LSTR); + promMetric.append(PROM_HELP_NAME) + .append(VERSION_STR) + .append(ApiVersion.VERSION.toString()).append("\",}") + .append(SPACE_STR + "1.0" + END_LSTR); + + for (String methodKey : metricMap.keySet()) { + String metricName = replaceSlashInKey(methodKey); + promMetric.append(STR_HELP) + .append(metricName).append(END_LSTR); + promMetric.append(STR_TYPE) + .append(metricName).append(SPACE_STR + GAUGE_TYPE + END_LSTR); + Map itemMetricMap = metricMap.get(methodKey); + for (String labelName : itemMetricMap.keySet()) { + promMetric.append(metricName).append(LEFT_NAME_STR).append(labelName) + .append(RIGHT_NAME_STR).append(itemMetricMap.get(labelName)) + .append(END_LSTR); + } + } + return promMetric.toString(); + } + + private String joinWithSlash(String path1, String path2) { + return String.join("/", path1, path2); + } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsKeys.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsKeys.java new file mode 100644 index 0000000000..1cda15c829 --- /dev/null +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsKeys.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.metrics; + +public enum MetricsKeys { + + MAX_RESPONSE_TIME(1, "max_response_time"), + + MEAN_RESPONSE_TIME(2, "mean_response_time"), + + TOTAL_REQUEST(3, "total_request"), + + FAILED_REQUEST(4, "failed_request"), + + SUCCESS_REQUEST(5, "success_request"); + + private final byte code; + private final String name; + + MetricsKeys(int code, String name) { + assert code < 256; + this.code = (byte) code; + this.name = name; + } +} diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsUtil.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsUtil.java index fabd1df7b9..bb411f9276 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsUtil.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/MetricsUtil.java @@ -24,12 +24,45 @@ import com.codahale.metrics.Histogram; import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Snapshot; import com.codahale.metrics.Timer; public class MetricsUtil { - private static final MetricRegistry REGISTRY = - MetricManager.INSTANCE.getRegistry(); + public static final String METRICS_PATH_TOTAL_COUNTER = "TOTAL_COUNTER"; + 
public static final String METRICS_PATH_FAILED_COUNTER = "FAILED_COUNTER"; + public static final String METRICS_PATH_SUCCESS_COUNTER = "SUCCESS_COUNTER"; + public static final String METRICS_PATH_RESPONSE_TIME_HISTOGRAM = + "RESPONSE_TIME_HISTOGRAM"; + public static final String P75_ATTR = "{name=\"p75\",} "; + public static final String P95_ATTR = "{name=\"p95\",} "; + public static final String P98_ATTR = "{name=\"p98\",} "; + public static final String P99_ATTR = "{name=\"p99\",} "; + public static final String P999_ATTR = "{name=\"p999\",} "; + public static final String MEAN_RATE_ATRR = "{name=\"mean_rate\",} "; + public static final String ONE_MIN_RATE_ATRR = "{name=\"m1_rate\",} "; + public static final String FIVE_MIN_RATE_ATRR = "{name=\"m5_rate\",} "; + public static final String FIFT_MIN_RATE_ATRR = "{name=\"m15_rate\",} "; + public static final MetricRegistry REGISTRY = MetricManager.INSTANCE.getRegistry(); + public static final String STR_HELP = "# HELP "; + public static final String STR_TYPE = "# TYPE "; + public static final String HISTOGRAM_TYPE = "histogram"; + public static final String UNTYPED = "untyped"; + public static final String GAUGE_TYPE = "gauge"; + public static final String END_LSTR = "\n"; + public static final String SPACE_STR = " "; + public static final String VERSION_STR = "{version=\""; + public static final String COUNT_ATTR = "{name=\"count\",} "; + public static final String MIN_ATTR = "{name=\"min\",} "; + public static final String MAX_ATTR = "{name=\"max\",} "; + public static final String MEAN_ATTR = "{name=\"mean\",} "; + public static final String STDDEV_ATTR = "{name=\"stddev\",} "; + public static final String P50_ATTR = "{name=\"p50\",} "; + + public static final String LEFT_NAME_STR = "{name="; + public static final String RIGHT_NAME_STR = ",} "; + public static final String PROM_HELP_NAME = "hugegraph_info"; + public static Gauge registerGauge(Class clazz, String name, Gauge gauge) { @@ -40,10 +73,18 @@ public 
static Counter registerCounter(Class clazz, String name) { return REGISTRY.counter(MetricRegistry.name(clazz, name)); } + public static Counter registerCounter(String name) { + return REGISTRY.counter(MetricRegistry.name(name)); + } + public static Histogram registerHistogram(Class clazz, String name) { return REGISTRY.histogram(MetricRegistry.name(clazz, name)); } + public static Histogram registerHistogram(String name) { + return REGISTRY.histogram(name); + } + public static Meter registerMeter(Class clazz, String name) { return REGISTRY.meter(MetricRegistry.name(clazz, name)); } @@ -51,4 +92,124 @@ public static Meter registerMeter(Class clazz, String name) { public static Timer registerTimer(Class clazz, String name) { return REGISTRY.timer(MetricRegistry.name(clazz, name)); } + + public static String replaceDotDashInKey(String orgKey) { + return orgKey.replace(".", "_").replace("-", "_"); + } + + public static String replaceSlashInKey(String orgKey) { + return orgKey.replace("/", "_"); + } + + public static void writePrometheusFormat(StringBuilder promeMetrics, MetricRegistry registry) { + // gauges + registry.getGauges().forEach((key, gauge) -> { + if (gauge != null) { + String helpName = replaceDotDashInKey(key); + promeMetrics.append(STR_HELP) + .append(helpName).append(END_LSTR); + promeMetrics.append(STR_TYPE) + .append(helpName).append(SPACE_STR + GAUGE_TYPE + END_LSTR); + promeMetrics.append(helpName).append(SPACE_STR).append(gauge.getValue()) + .append(END_LSTR); + } + }); + + // histograms + registry.getHistograms().forEach((key, histogram) -> { + if (histogram != null) { + String helpName = replaceDotDashInKey(key); + promeMetrics.append(STR_HELP) + .append(helpName).append(END_LSTR); + promeMetrics.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + HISTOGRAM_TYPE + END_LSTR); + + promeMetrics.append(helpName) + .append(COUNT_ATTR).append(histogram.getCount()).append(END_LSTR); + promeMetrics.append( + exportSnapshot(helpName, 
histogram.getSnapshot())); + } + }); + + // meters + registry.getMeters().forEach((key, metric) -> { + if (metric != null) { + String helpName = replaceDotDashInKey(key); + promeMetrics.append(STR_HELP) + .append(helpName).append(END_LSTR); + promeMetrics.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + HISTOGRAM_TYPE + END_LSTR); + + promeMetrics.append(helpName) + .append(COUNT_ATTR).append(metric.getCount()).append(END_LSTR); + promeMetrics.append(helpName) + .append(MEAN_RATE_ATRR).append(metric.getMeanRate()).append(END_LSTR); + promeMetrics.append(helpName) + .append(ONE_MIN_RATE_ATRR).append(metric.getOneMinuteRate()) + .append(END_LSTR); + promeMetrics.append(helpName) + .append(FIVE_MIN_RATE_ATRR).append(metric.getFiveMinuteRate()) + .append(END_LSTR); + promeMetrics.append(helpName) + .append(FIFT_MIN_RATE_ATRR).append(metric.getFifteenMinuteRate()) + .append(END_LSTR); + } + }); + + // timer + registry.getTimers().forEach((key, timer) -> { + if (timer != null) { + String helpName = replaceDotDashInKey(key); + promeMetrics.append(STR_HELP) + .append(helpName).append(END_LSTR); + promeMetrics.append(STR_TYPE) + .append(helpName) + .append(SPACE_STR + HISTOGRAM_TYPE + END_LSTR); + + promeMetrics.append(helpName) + .append(COUNT_ATTR).append(timer.getCount()).append(END_LSTR); + promeMetrics.append(helpName) + .append(ONE_MIN_RATE_ATRR).append(timer.getOneMinuteRate()) + .append(END_LSTR); + promeMetrics.append(helpName) + .append(FIVE_MIN_RATE_ATRR).append(timer.getFiveMinuteRate()) + .append(END_LSTR); + promeMetrics.append(helpName) + .append(FIFT_MIN_RATE_ATRR).append(timer.getFifteenMinuteRate()) + .append(END_LSTR); + promeMetrics.append( + exportSnapshot(helpName, timer.getSnapshot())); + } + }); + } + + public static String exportSnapshot(final String helpName, final Snapshot snapshot) { + if (snapshot == null) { + return ""; + } + StringBuilder snapMetrics = new StringBuilder(); + snapMetrics.append(helpName) + 
.append(MIN_ATTR).append(snapshot.getMin()).append(END_LSTR); + snapMetrics.append(helpName) + .append(MAX_ATTR).append(snapshot.getMax()).append(END_LSTR); + snapMetrics.append(helpName) + .append(MEAN_ATTR).append(snapshot.getMean()).append(END_LSTR); + snapMetrics.append(helpName) + .append(STDDEV_ATTR).append(snapshot.getStdDev()).append(END_LSTR); + snapMetrics.append(helpName) + .append(P50_ATTR).append(snapshot.getMedian()).append(END_LSTR); + snapMetrics.append(helpName) + .append(P75_ATTR).append(snapshot.get75thPercentile()).append(END_LSTR); + snapMetrics.append(helpName) + .append(P95_ATTR).append(snapshot.get95thPercentile()).append(END_LSTR); + snapMetrics.append(helpName) + .append(P98_ATTR).append(snapshot.get98thPercentile()).append(END_LSTR); + snapMetrics.append(helpName) + .append(P99_ATTR).append(snapshot.get99thPercentile()).append(END_LSTR); + snapMetrics.append(helpName) + .append(P999_ATTR).append(snapshot.get999thPercentile()).append(END_LSTR); + return snapMetrics.toString(); + } } diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java index 499103c174..cce5af30cc 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/MetricsApiTest.java @@ -17,20 +17,24 @@ package org.apache.hugegraph.api; +import java.util.HashMap; import java.util.Map; -import jakarta.ws.rs.core.Response; +import org.apache.hugegraph.testutil.Assert; import org.junit.Test; -import org.apache.hugegraph.testutil.Assert; +import jakarta.ws.rs.core.Response; public class MetricsApiTest extends BaseApiTest { - private static String path = "/metrics"; + private static final String path = "/metrics"; + private static final String statisticsPath = path + "/statistics"; @Test - public void testMetricsAll() { - 
Response r = client().get(path); + public void testBaseMetricsAll() { + Map params = new HashMap<>(); + params.put("type", "json"); + Response r = client().get(path, params); String result = assertResponseStatus(200, r); assertJsonContains(result, "gauges"); assertJsonContains(result, "counters"); @@ -39,6 +43,26 @@ public void testMetricsAll() { assertJsonContains(result, "timers"); } + @Test + public void testBaseMetricsPromAll() { + Response r = client().get(path); + assertResponseStatus(200, r); + } + + @Test + public void testStatisticsMetricsAll() { + Map params = new HashMap<>(); + params.put("type", "json"); + Response r = client().get(path); + assertResponseStatus(200, r); + } + + @Test + public void testStatisticsMetricsPromAll() { + Response r = client().get(statisticsPath); + assertResponseStatus(200, r); + } + @Test public void testMetricsSystem() { Response r = client().get(path, "system"); From e7cfb0f2c963af37518bcd4a55b25d5be28f5610 Mon Sep 17 00:00:00 2001 From: Jermy Li Date: Fri, 13 Oct 2023 13:47:01 +0800 Subject: [PATCH 12/24] README.md tiny improve (#2320) --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index d50235bbec..5e589be604 100644 --- a/README.md +++ b/README.md @@ -57,9 +57,9 @@ The project [doc page](https://hugegraph.apache.org/docs/) contains more informa and provides detailed documentation for users. (Structure / Usage / API / Configs...) And here are links of other **HugeGraph** component/repositories: -1. 
[hugegraph-toolchain](https://github.com/apache/incubator-hugegraph-toolchain) (graph **[loader](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-loader)/[dashboard](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-hubble)/[tool](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-tools)/[client](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-client)**) -2. [hugegraph-computer](https://github.com/apache/incubator-hugegraph-computer) (matched **graph computing** system) -3. [hugegraph-commons](https://github.com/apache/incubator-hugegraph-commons) (**common & rpc** module) +1. [hugegraph-toolchain](https://github.com/apache/incubator-hugegraph-toolchain) (graph tools **[loader](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-loader)/[dashboard](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-hubble)/[tool](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-tools)/[client](https://github.com/apache/incubator-hugegraph-toolchain/tree/master/hugegraph-client)**) +2. [hugegraph-computer](https://github.com/apache/incubator-hugegraph-computer) (integrated **graph computing** system) +3. [hugegraph-commons](https://github.com/apache/incubator-hugegraph-commons) (**common & rpc** libs) 4. 
[hugegraph-website](https://github.com/apache/incubator-hugegraph-doc) (**doc & website** code) ## License From f351636ccb5d142ec5867a33467208b29edde023 Mon Sep 17 00:00:00 2001 From: SunnyBoy-WYH <48077841+SunnyBoy-WYH@users.noreply.github.com> Date: Fri, 20 Oct 2023 10:15:51 +0800 Subject: [PATCH 13/24] feat(api): support embedded arthas agent in hugegraph-server (#2278) --- hugegraph-server/hugegraph-api/pom.xml | 15 +- .../hugegraph/api/arthas/ArthasAPI.java | 56 +++++ .../hugegraph/config/ServerOptions.java | 32 +++ .../hugegraph-dist/release-docs/NOTICE | 38 ++++ .../licenses/LICENSE-arthas-agent-attach.txt | 202 ++++++++++++++++++ .../licenses/LICENSE-arthas-packaging.txt | 202 ++++++++++++++++++ .../LICENSE-byte-buddy-agent-1.11.6.txt | 176 +++++++++++++++ .../release-docs/licenses/LICENSE-zt-zip.txt | 202 ++++++++++++++++++ .../scripts/dependency/known-dependencies.txt | 4 + .../static/conf/rest-server.properties | 6 + .../apache/hugegraph/api/ApiTestSuite.java | 3 +- .../apache/hugegraph/api/ArthasApiTest.java | 62 ++++++ .../org/apache/hugegraph/api/BaseApiTest.java | 29 ++- hugegraph-server/pom.xml | 1 + 14 files changed, 1014 insertions(+), 14 deletions(-) create mode 100644 hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java create mode 100644 hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-agent-attach.txt create mode 100644 hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-packaging.txt create mode 100644 hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-byte-buddy-agent-1.11.6.txt create mode 100644 hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-zt-zip.txt create mode 100644 hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ArthasApiTest.java diff --git a/hugegraph-server/hugegraph-api/pom.xml b/hugegraph-server/hugegraph-api/pom.xml index 419f541831..82e8032cc2 100644 --- a/hugegraph-server/hugegraph-api/pom.xml +++ 
b/hugegraph-server/hugegraph-api/pom.xml @@ -15,8 +15,8 @@ License for the specific language governing permissions and limitations under the License. --> - org.apache.hugegraph @@ -153,6 +153,17 @@ swagger-jaxrs2-jakarta 2.1.9 + + + com.taobao.arthas + arthas-agent-attach + ${arthas.version} + + + com.taobao.arthas + arthas-packaging + ${arthas.version} + diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java new file mode 100644 index 0000000000..549f9de0a8 --- /dev/null +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.api.arthas; + +import java.util.HashMap; + +import org.apache.hugegraph.api.API; +import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.config.ServerOptions; +import org.apache.hugegraph.util.JsonUtil; + +import com.codahale.metrics.annotation.Timed; +import com.taobao.arthas.agent.attach.ArthasAgent; + +import jakarta.inject.Singleton; +import jakarta.ws.rs.PUT; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.Context; + +@Path("arthas") +@Singleton +public class ArthasAPI extends API { + + @Context + private jakarta.inject.Provider configProvider; + + @PUT + @Timed + @Produces(APPLICATION_JSON_WITH_CHARSET) + public Object startArthas() { + HugeConfig config = this.configProvider.get(); + HashMap configMap = new HashMap<>(4); + configMap.put("arthas.telnetPort", config.get(ServerOptions.ARTHAS_TELNET_PORT)); + configMap.put("arthas.httpPort", config.get(ServerOptions.ARTHAS_HTTP_PORT)); + configMap.put("arthas.ip", config.get(ServerOptions.ARTHAS_IP)); + configMap.put("arthas.disabledCommands", config.get(ServerOptions.ARTHAS_DISABLED_COMMANDS)); + ArthasAgent.attach(configMap); + return JsonUtil.toJson(configMap); + } +} diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java index 6e41ae87c0..e8b999fb56 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java @@ -272,4 +272,36 @@ public static synchronized ServerOptions instance() { disallowEmpty(), "disable" ); + + public static final ConfigOption ARTHAS_TELNET_PORT = + new ConfigOption<>( + "arthas.telnet_port", + "The telnet port provided by Arthas, it can be accessible from the outside.", + disallowEmpty(), + "8562" + ); + + 
public static final ConfigOption ARTHAS_HTTP_PORT = + new ConfigOption<>( + "arthas.http_port", + "The HTTP port provided by Arthas, it can be accessible from the outside.", + disallowEmpty(), + "8561" + ); + + public static final ConfigOption ARTHAS_IP = + new ConfigOption<>( + "arthas.ip", + "The IP provided by Arthas, it can be accessible from the outside.", + disallowEmpty(), + "0.0.0.0" + ); + + public static final ConfigOption ARTHAS_DISABLED_COMMANDS = + new ConfigOption<>( + "arthas.disabled_commands", + "The disabled Arthas commands due to high risk.", + null, + "jad" + ); } diff --git a/hugegraph-server/hugegraph-dist/release-docs/NOTICE b/hugegraph-server/hugegraph-dist/release-docs/NOTICE index 101ea81f45..39922da249 100644 --- a/hugegraph-server/hugegraph-dist/release-docs/NOTICE +++ b/hugegraph-server/hugegraph-dist/release-docs/NOTICE @@ -2058,3 +2058,41 @@ HPPC borrowed code, ideas or both from: (Apache license) * Koloboke, https://github.com/OpenHFT/Koloboke (Apache license) + + + +======================================================================== + +Arthas NOTICE + +======================================================================== + + +Arthas +Copyright 2018 Alibaba Group + +This product includes software developed at +Alibaba Group (https://www.alibabagroup.com/en/global/home). 
+ +This product contains code form the greys-anatomy Project: + +The greys-anatomy Project +================= +Please visit Github for more information: +* https://github.com/oldmanpushcart/greys-anatomy + + +------------------------------------------------------------------------------- +This product contains a modified portion of 'Apache Commons Lang': +* LICENSE: + * Apache License 2.0 +* HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains a modified portion of 'Apache Commons Net': +* LICENSE: + * Apache License 2.0 +* HOMEPAGE: + * https://commons.apache.org/proper/commons-net/ + diff --git a/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-agent-attach.txt b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-agent-attach.txt new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-agent-attach.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-packaging.txt b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-packaging.txt new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-arthas-packaging.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-byte-buddy-agent-1.11.6.txt b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-byte-buddy-agent-1.11.6.txt new file mode 100644 index 0000000000..d0381d6d04 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-byte-buddy-agent-1.11.6.txt @@ -0,0 +1,176 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-zt-zip.txt b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-zt-zip.txt new file mode 100644 index 0000000000..a250c1a8c1 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/release-docs/licenses/LICENSE-zt-zip.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2012 ZeroTurnaround LLC. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt index 5e50bdb4a0..b5f5036617 100644 --- a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt +++ b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt @@ -7,6 +7,8 @@ annotations-4.1.1.4.jar ansj_seg-5.1.6.jar antlr-runtime-3.5.2.jar aopalliance-repackaged-3.0.1.jar +arthas-agent-attach-3.7.1.jar +arthas-packaging-3.7.1.jar asm-5.0.4.jar asm-6.0.jar asm-analysis-5.0.3.jar @@ -18,6 +20,7 @@ audience-annotations-0.5.0.jar bolt-1.6.4.jar byte-buddy-1.10.5.jar byte-buddy-agent-1.10.5.jar +byte-buddy-agent-1.11.6.jar caffeine-2.2.6.jar caffeine-2.3.1.jar cassandra-all-3.11.12.jar @@ -261,3 +264,4 @@ tracer-core-3.0.8.jar translation-1.0.4.jar util-9.0-9.0.20190305.jar validation-api-1.1.0.Final.jar +zt-zip-1.14.jar diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties index 794caec84d..f6444f84fb 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties @@ -9,6 +9,12 @@ graphs=./conf/graphs batch.max_write_ratio=80 batch.max_write_threads=0 +# configuration of arthas +arthas.telnet_port=8562 +arthas.http_port=8561 +arthas.ip=0.0.0.0 +arthas.disabled_commands=jad + # authentication configs # choose 'org.apache.hugegraph.auth.StandardAuthenticator' or # 'org.apache.hugegraph.auth.ConfigAuthenticator' diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ApiTestSuite.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ApiTestSuite.java index a5830a4336..26e00e227a 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ApiTestSuite.java +++ 
b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ApiTestSuite.java @@ -40,7 +40,8 @@ LoginApiTest.class, ProjectApiTest.class, TraversersApiTestSuite.class, - CypherApiTest.class + CypherApiTest.class, + ArthasApiTest.class }) public class ApiTestSuite { diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ArthasApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ArthasApiTest.java new file mode 100644 index 0000000000..174b665fea --- /dev/null +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/ArthasApiTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.api; + +import org.junit.Before; +import org.junit.Test; + +import com.google.common.collect.ImmutableMap; + +import jakarta.ws.rs.core.Response; + +public class ArthasApiTest extends BaseApiTest { + + private static final String ARTHAS_START_PATH = "/arthas"; + private static final String ARTHAS_API_BASE_URL = "http://127.0.0.1:8561"; + private static final String ARTHAS_API_PATH = "/api"; + + @Before + public void testArthasStart() { + Response r = client().put(ARTHAS_START_PATH, "", "", ImmutableMap.of()); + assertResponseStatus(200, r); + } + + @Test + public void testArthasApi() { + String body = "{\n" + + " \"action\": \"exec\",\n" + + " \"requestId\": \"req112\",\n" + + " \"consumerId\": \"955dbd1325334a84972b0f3ac19de4f7_2\",\n" + + " \"command\": \"version\",\n" + + " \"execTimeout\": \"10000\"\n" + + "}"; + RestClient arthasApiClient = new RestClient(ARTHAS_API_BASE_URL, false); + // If request header contains basic auth, and if we are not set auth when arthas attach hg, + // arthas will auth it and return 401. 
ref:https://arthas.aliyun.com/en/doc/auth.html#configure-username-and-password + Response r = arthasApiClient.post(ARTHAS_API_PATH, body); + String result = assertResponseStatus(200, r); + assertJsonContains(result, "state"); + assertJsonContains(result, "requestId"); + assertJsonContains(result, "sessionId"); + assertJsonContains(result, "body"); + + RestClient arthasApiClientWithAuth = new RestClient(ARTHAS_API_BASE_URL); + r = arthasApiClientWithAuth.post(ARTHAS_API_PATH, body); + assertResponseStatus(401, r); + } +} diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/BaseApiTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/BaseApiTest.java index 83c1dcebe0..24b19ba1ea 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/BaseApiTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/api/BaseApiTest.java @@ -27,13 +27,10 @@ import java.util.function.Consumer; import java.util.stream.Collectors; -import jakarta.ws.rs.client.Client; -import jakarta.ws.rs.client.ClientBuilder; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.client.WebTarget; -import jakarta.ws.rs.core.MultivaluedMap; -import jakarta.ws.rs.core.Response; import org.apache.http.util.TextUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.util.CollectionUtil; +import org.apache.hugegraph.util.JsonUtil; import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature; import org.glassfish.jersey.client.filter.EncodingFilter; import org.glassfish.jersey.message.GZipEncoder; @@ -42,9 +39,6 @@ import org.junit.Assert; import org.junit.BeforeClass; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.util.CollectionUtil; -import org.apache.hugegraph.util.JsonUtil; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -52,6 
+46,13 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.client.WebTarget; +import jakarta.ws.rs.core.MultivaluedMap; +import jakarta.ws.rs.core.Response; + public class BaseApiTest { private static final String BASE_URL = "http://127.0.0.1:8080"; @@ -104,11 +105,17 @@ public static class RestClient { private WebTarget target; public RestClient(String url) { + this(url, true); + } + + public RestClient(String url,Boolean enableAuth) { this.client = ClientBuilder.newClient(); this.client.register(EncodingFilter.class); this.client.register(GZipEncoder.class); - this.client.register(HttpAuthenticationFeature.basic(USERNAME, - PASSWORD)); + if(enableAuth) { + this.client.register(HttpAuthenticationFeature.basic(USERNAME, + PASSWORD)); + } this.target = this.client.target(url); } diff --git a/hugegraph-server/pom.xml b/hugegraph-server/pom.xml index 4b72dd7e50..3bb04f0faf 100644 --- a/hugegraph-server/pom.xml +++ b/hugegraph-server/pom.xml @@ -114,6 +114,7 @@ 1.47.0 3.21.7 1.36 + 3.7.1 From 676307e01ebec46fc39421d6918f471686cd03d1 Mon Sep 17 00:00:00 2001 From: Dandelion <49650772+aroundabout@users.noreply.github.com> Date: Mon, 23 Oct 2023 16:17:36 +0800 Subject: [PATCH 14/24] doc: README.md tiny improve (#2331) --- hugegraph-server/hugegraph-dist/README.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/hugegraph-server/hugegraph-dist/README.md b/hugegraph-server/hugegraph-dist/README.md index b8e6499285..1aedb37bbe 100644 --- a/hugegraph-server/hugegraph-dist/README.md +++ b/hugegraph-server/hugegraph-dist/README.md @@ -10,7 +10,7 @@ We can use docker to quickly start an inner HugeGraph server with RocksDB in bac 2. Using docker compose - We can also use `docker-compose up -d`. 
The `docker-compose.yaml` is below: + Certainly we can only deploy server without other instance. Additionally, if we want to manage other HugeGraph-related instances with `server` in a single file, we can deploy HugeGraph-related instances via `docker-compose up -d`. The `docker-compose.yaml` is as below: ```yaml version: '3' @@ -27,8 +27,6 @@ If you want to **pre-load** some (test) data or graphs in container(by default), If you want to customize the pre-loaded data, please mount the the groovy scripts (not necessary). - - 1. Using docker run Use `docker run -itd --name=graph -p 18080:8080 -e PRELOAD=true -v /path/to/yourScript:/hugegraph/scripts/example.groovy hugegraph/hugegraph` @@ -36,7 +34,7 @@ If you want to customize the pre-loaded data, please mount the the groovy script 2. Using docker compose - We can also use `docker-compose up -d` to quickly start. The `docker-compose.yaml` is below: + We can also use `docker-compose up -d` to quickly start. The `docker-compose.yaml` is below. [example.groovy](https://github.com/apache/incubator-hugegraph/blob/master/hugegraph-dist/src/assembly/static/scripts/example.groovy) is a pre-defined script. If needed, we can mount a new `example.groovy` to preload different data: ```yaml version: '3' From f51273784bbd5d4282c0ebf3e41bd90c93ff613a Mon Sep 17 00:00:00 2001 From: Dandelion <49650772+aroundabout@users.noreply.github.com> Date: Mon, 23 Oct 2023 16:29:42 +0800 Subject: [PATCH 15/24] feat: support Cassandra with docker-compose in server (#2307) 1. change the dockerfile, adding the shell to wait for storage backend and use a docker-entrypoint.sh to manage the starting process. 2. delete a deprecated class in gremlin-console.sh (reference: [doc of ScriptExecutor](https://tinkerpop.apache.org/javadocs/3.2.3/full/org/apache/tinkerpop/gremlin/groovy/jsr223/ScriptExecutor.html)) 3. add a healthy check in docker-compose 4. add an example folder where we can put all the template docker-compose.yml here 5. 
add `*swagger-ui*` in gitignore, which appears after you compile the source code locally. --------- Co-authored-by: imbajin --- .licenserc.yaml | 1 + Dockerfile | 14 +++-- LICENSE | 1 + .../docker/docker-entrypoint.sh | 24 ++++++++ .../example/docker-compose-cassandra.yml | 61 +++++++++++++++++++ .../docker/scripts/detect-storage.groovy | 31 ++++++++++ .../docker/scripts/remote-connect.groovy | 19 ++++++ .../hugegraph-dist/release-docs/LICENSE | 1 + .../assembly/static/bin/docker-entrypoint.sh | 24 ++++++++ .../assembly/static/bin/gremlin-console.sh | 10 +-- .../src/assembly/static/bin/wait-storage.sh | 54 ++++++++++++++++ 11 files changed, 229 insertions(+), 11 deletions(-) create mode 100644 hugegraph-server/hugegraph-dist/docker/docker-entrypoint.sh create mode 100644 hugegraph-server/hugegraph-dist/docker/example/docker-compose-cassandra.yml create mode 100644 hugegraph-server/hugegraph-dist/docker/scripts/detect-storage.groovy create mode 100644 hugegraph-server/hugegraph-dist/docker/scripts/remote-connect.groovy create mode 100644 hugegraph-server/hugegraph-dist/src/assembly/static/bin/docker-entrypoint.sh create mode 100644 hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh diff --git a/.licenserc.yaml b/.licenserc.yaml index dbe5cc2177..be040a8a18 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -104,6 +104,7 @@ header: # `header` section is configurations for source codes license header. - 'hugegraph-store/hg-store-node/src/main/java/org/apache/hugegraph/store/node/metrics/ProcfsReader.java' - 'hugegraph-store/hg-store-node/src/main/java/org/apache/hugegraph/store/node/metrics/ProcfsSmaps.java' # TODO: temporarily added to the ignore list, need handle them before releases ( ↑ ) + - 'hugegraph-dist/src/assembly/static/bin/wait-storage.sh' comment: on-failure # on what condition license-eye will comment on the pull request, `on-failure`, `always`, `never`. 
# license-location-threshold specifies the index threshold where the license header can be located, diff --git a/Dockerfile b/Dockerfile index e096f3430a..7dcbf2131f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,8 @@ COPY --from=build /pkg/apache-hugegraph-incubating-$version/ /hugegraph LABEL maintainer="HugeGraph Docker Maintainers " # TODO: use g1gc or zgc as default -ENV JAVA_OPTS="-XX:+UnlockExperimentalVMOptions -XX:+UseContainerSupport -XX:MaxRAMPercentage=50 -XshowSettings:vm" +ENV JAVA_OPTS="-XX:+UnlockExperimentalVMOptions -XX:+UseContainerSupport -XX:MaxRAMPercentage=50 -XshowSettings:vm" \ + HUGEGRAPH_HOME="hugegraph" #COPY . /hugegraph/hugegraph-server WORKDIR /hugegraph/ @@ -50,11 +51,16 @@ RUN set -x \ # 2. Init HugeGraph Sever RUN set -e \ && pwd && cd /hugegraph/ \ - && sed -i "s/^restserver.url.*$/restserver.url=http:\/\/0.0.0.0:8080/g" ./conf/rest-server.properties \ - && ./bin/init-store.sh + && sed -i "s/^restserver.url.*$/restserver.url=http:\/\/0.0.0.0:8080/g" ./conf/rest-server.properties + +# 3. Init docker script +COPY hugegraph-dist/docker/scripts/remote-connect.groovy ./scripts +COPY hugegraph-dist/docker/scripts/detect-storage.groovy ./scripts +COPY hugegraph-dist/docker/docker-entrypoint.sh . 
+RUN chmod 755 ./docker-entrypoint.sh EXPOSE 8080 VOLUME /hugegraph ENTRYPOINT ["/usr/bin/dumb-init", "--"] -CMD ["./bin/start-hugegraph.sh", "-d false -j $JAVA_OPTS -g zgc"] +CMD ["./docker-entrypoint.sh"] diff --git a/LICENSE b/LICENSE index ad08080e31..cea0b74f43 100644 --- a/LICENSE +++ b/LICENSE @@ -214,5 +214,6 @@ hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptT hugegraph-core/src/main/java/org/apache/hugegraph/type/Nameable.java from https://github.com/JanusGraph/janusgraph hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Cardinality.java from https://github.com/JanusGraph/janusgraph hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java from https://github.com/JanusGraph/janusgraph +hugegraph-dist/src/assembly/static/bin/wait-storage.sh from https://github.com/JanusGraph/janusgraph hugegraph-api/src/main/java/org/apache/hugegraph/opencypher/CypherOpProcessor.java from https://github.com/opencypher/cypher-for-gremlin hugegraph-api/src/main/java/org/apache/hugegraph/opencypher/CypherPlugin.java from https://github.com/opencypher/cypher-for-gremlin diff --git a/hugegraph-server/hugegraph-dist/docker/docker-entrypoint.sh b/hugegraph-server/hugegraph-dist/docker/docker-entrypoint.sh new file mode 100644 index 0000000000..e1fad4a9ff --- /dev/null +++ b/hugegraph-server/hugegraph-dist/docker/docker-entrypoint.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with this +# work for additional information regarding copyright ownership. The ASF +# licenses this file to You under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + + +./bin/wait-storage.sh + +./bin/init-store.sh + +./bin/start-hugegraph.sh -d false -j "$JAVA_OPTS" -g zgc diff --git a/hugegraph-server/hugegraph-dist/docker/example/docker-compose-cassandra.yml b/hugegraph-server/hugegraph-dist/docker/example/docker-compose-cassandra.yml new file mode 100644 index 0000000000..3682b02f92 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/docker/example/docker-compose-cassandra.yml @@ -0,0 +1,61 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +version: "3" + +services: + graph: + image: hugegraph/hugegraph + container_name: cas-graph + ports: + - 18080:8080 + environment: + hugegraph.backend: cassandra + hugegraph.serializer: cassandra + hugegraph.cassandra.host: cas-cassandra + hugegraph.cassandra.port: 9042 + networks: + - ca-network + depends_on: + - cassandra + healthcheck: + test: ["CMD", "bin/gremlin-console.sh", "--" ,"-e", "scripts/remote-connect.groovy"] + interval: 10s + timeout: 30s + retries: 3 + + cassandra: + image: cassandra:4 + container_name: cas-cassandra + ports: + - 7000:7000 + - 9042:9042 + security_opt: + - seccomp:unconfined + networks: + - ca-network + healthcheck: + test: ["CMD", "cqlsh", "--execute", "describe keyspaces;"] + interval: 10s + timeout: 30s + retries: 5 + +networks: + ca-network: + +volumes: + hugegraph-data: diff --git a/hugegraph-server/hugegraph-dist/docker/scripts/detect-storage.groovy b/hugegraph-server/hugegraph-dist/docker/scripts/detect-storage.groovy new file mode 100644 index 0000000000..df57ade988 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/docker/scripts/detect-storage.groovy @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.hugegraph.HugeFactory +import org.apache.hugegraph.dist.RegisterUtil + +// register all the backend to avoid changes if docker needs to support othre backend +RegisterUtil.registerPlugins() +RegisterUtil.registerRocksDB() +RegisterUtil.registerCassandra() +RegisterUtil.registerScyllaDB() +RegisterUtil.registerHBase() +RegisterUtil.registerMysql() +RegisterUtil.registerPalo() +RegisterUtil.registerPostgresql() + +graph = HugeFactory.open('./conf/graphs/hugegraph.properties') diff --git a/hugegraph-server/hugegraph-dist/docker/scripts/remote-connect.groovy b/hugegraph-server/hugegraph-dist/docker/scripts/remote-connect.groovy new file mode 100644 index 0000000000..e352cdc7e9 --- /dev/null +++ b/hugegraph-server/hugegraph-dist/docker/scripts/remote-connect.groovy @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +:remote connect tinkerpop.server conf/remote.yaml +:> hugegraph diff --git a/hugegraph-server/hugegraph-dist/release-docs/LICENSE b/hugegraph-server/hugegraph-dist/release-docs/LICENSE index f1cc9686c8..25c50c2fbb 100644 --- a/hugegraph-server/hugegraph-dist/release-docs/LICENSE +++ b/hugegraph-server/hugegraph-dist/release-docs/LICENSE @@ -220,6 +220,7 @@ The text of each license is the standard Apache 2.0 license. hugegraph-core/src/main/java/org/apache/hugegraph/type/Nameable.java from https://github.com/JanusGraph/janusgraph hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Cardinality.java from https://github.com/JanusGraph/janusgraph hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java from https://github.com/JanusGraph/janusgraph +hugegraph-dist/src/assembly/static/bin/wait-storage.sh from https://github.com/JanusGraph/janusgraph hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java from https://github.com/apache/tinkerpop hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/ProcessBasicSuite.java from https://github.com/apache/tinkerpop hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/StructureBasicSuite.java from https://github.com/apache/tinkerpop diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/docker-entrypoint.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/docker-entrypoint.sh new file mode 100644 index 0000000000..e1fad4a9ff --- /dev/null +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/docker-entrypoint.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with this +# work for additional information regarding copyright ownership. The ASF +# licenses this file to You under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + + +./bin/wait-storage.sh + +./bin/init-store.sh + +./bin/start-hugegraph.sh -d false -j "$JAVA_OPTS" -g zgc diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/gremlin-console.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/gremlin-console.sh index b8a0fadbd2..edcdc0c403 100755 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/gremlin-console.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/gremlin-console.sh @@ -84,14 +84,10 @@ PROFILING_ENABLED=false # Process options MAIN_CLASS=org.apache.tinkerpop.gremlin.console.Console -while getopts "elpv" opt; do +while getopts "lpv" opt; do case "$opt" in - e) MAIN_CLASS=org.apache.tinkerpop.gremlin.groovy.jsr223.ScriptExecutor - # Stop processing gremlin-console.sh arguments as soon as the -e switch - # is seen; everything following -e becomes arguments to the - # ScriptExecutor main class. This maintains compatibility with - # older deployments. - break;; + # class ScriptExecutor has been Deprecated. 
+ # reference https://tinkerpop.apache.org/javadocs/3.2.3/full/org/apache/tinkerpop/gremlin/groovy/jsr223/ScriptExecutor.html l) eval GREMLIN_LOG_LEVEL=\$$OPTIND OPTIND="$(( $OPTIND + 1 ))" if [ "$GREMLIN_LOG_LEVEL" = "TRACE" -o \ diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh new file mode 100644 index 0000000000..3a98e8c56e --- /dev/null +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# +# Copyright 2023 JanusGraph Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +function abs_path() { + SOURCE="${BASH_SOURCE[0]}" + while [[ -h "$SOURCE" ]]; do + DIR="$(cd -P "$(dirname "$SOURCE")" && pwd)" + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" + done + cd -P "$(dirname "$SOURCE")" && pwd +} + +BIN=$(abs_path) +TOP="$(cd "$BIN"/../ && pwd)" +GRAPH_CONF="$TOP/conf/graphs/hugegraph.properties" +WAIT_STORAGE_TIMEOUT_S=120 +DETECT_STORAGE="$TOP/scripts/detect-storage.groovy" + +. "$BIN"/util.sh + +# apply config from env +while IFS=' ' read -r envvar_key envvar_val; do + if [[ "${envvar_key}" =~ hugegraph\. ]] && [[ ! 
-z ${envvar_val} ]]; then + envvar_key=${envvar_key#"hugegraph."} + if grep -q -E "^\s*${envvar_key}\s*=\.*" ${GRAPH_CONF}; then + sed -ri "s#^(\s*${envvar_key}\s*=).*#\\1${envvar_val}#" ${GRAPH_CONF} + else + echo "${envvar_key}=${envvar_val}" >> ${GRAPH_CONF} + fi + else + continue + fi +done < <(env | sort -r | awk -F= '{ st = index($0, "="); print $1 " " substr($0, st+1) }') + +# wait for storage +if ! [ -z "${WAIT_STORAGE_TIMEOUT_S:-}" ]; then + timeout "${WAIT_STORAGE_TIMEOUT_S}s" bash -c \ + "until bin/gremlin-console.sh -- -e $DETECT_STORAGE > /dev/null 2>&1; do echo \"waiting for storage...\"; sleep 5; done" +fi From c787742105644ab1924f79f6f34bb88223c2870b Mon Sep 17 00:00:00 2001 From: Dandelion <49650772+aroundabout@users.noreply.github.com> Date: Tue, 24 Oct 2023 17:24:43 +0800 Subject: [PATCH 16/24] fix: always wait for storage if rocksdb is selected (#2333) --- .../src/assembly/static/bin/wait-storage.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh index 3a98e8c56e..bdadeab234 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh @@ -48,7 +48,9 @@ while IFS=' ' read -r envvar_key envvar_val; do done < <(env | sort -r | awk -F= '{ st = index($0, "="); print $1 " " substr($0, st+1) }') # wait for storage -if ! [ -z "${WAIT_STORAGE_TIMEOUT_S:-}" ]; then - timeout "${WAIT_STORAGE_TIMEOUT_S}s" bash -c \ - "until bin/gremlin-console.sh -- -e $DETECT_STORAGE > /dev/null 2>&1; do echo \"waiting for storage...\"; sleep 5; done" +if env | grep '^hugegraph\.' > /dev/null; then + if ! 
[ -z "${WAIT_STORAGE_TIMEOUT_S:-}" ]; then + timeout "${WAIT_STORAGE_TIMEOUT_S}s" bash -c \ + "until bin/gremlin-console.sh -- -e $DETECT_STORAGE > /dev/null 2>&1; do echo \"waiting for storage...\"; sleep 5; done" + fi fi From f27547737150088fd659ac306409dc272befe45f Mon Sep 17 00:00:00 2001 From: Wu Chencan <77946882+DanGuge@users.noreply.github.com> Date: Tue, 24 Oct 2023 06:33:24 -0500 Subject: [PATCH 17/24] feat(core): support batch+parallel edges traverse (#2312) - Enhance Consumers.java, supporting ExceptionHandle and `Future` to handle InterruptedException when awaiting - Add Nested Iterator Edge and support batch execution - Support batch execution & thread parallel in KoutTraverser and Kneighbor --- .../backend/query/EdgesQueryIterator.java | 64 +++++ .../apache/hugegraph/task/TaskManager.java | 15 +- .../traversal/algorithm/HugeTraverser.java | 45 ++++ .../algorithm/KneighborTraverser.java | 56 +++-- .../traversal/algorithm/KoutTraverser.java | 76 +++--- .../traversal/algorithm/OltpTraverser.java | 223 +++++++++++++++++- .../algorithm/records/KneighborRecords.java | 14 +- .../traversal/algorithm/steps/Steps.java | 4 + .../org/apache/hugegraph/util/Consumers.java | 128 +++++++--- .../backend/store/rocksdb/RocksDBStore.java | 13 +- 10 files changed, 516 insertions(+), 122 deletions(-) create mode 100644 hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/EdgesQueryIterator.java diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/EdgesQueryIterator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/EdgesQueryIterator.java new file mode 100644 index 0000000000..4ab9a8859a --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/EdgesQueryIterator.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.query; + +import java.util.Iterator; +import java.util.List; + +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.type.define.Directions; + +public class EdgesQueryIterator implements Iterator { + + private final List labels; + private final Directions directions; + private final long limit; + private final Iterator sources; + + public EdgesQueryIterator(Iterator sources, + Directions directions, + List labels, + long limit) { + this.sources = sources; + this.labels = labels; + this.directions = directions; + // Traverse NO_LIMIT 和 Query.NO_LIMIT 不同 + this.limit = limit < 0 ? 
Query.NO_LIMIT : limit; + } + + @Override + public boolean hasNext() { + return sources.hasNext(); + } + + @Override + public Query next() { + Id sourceId = this.sources.next(); + ConditionQuery query = GraphTransaction.constructEdgesQuery(sourceId, + this.directions, + this.labels); + if (this.limit != Query.NO_LIMIT) { + query.limit(this.limit); + query.capacity(this.limit); + } else { + query.capacity(Query.NO_CAPACITY); + } + return query; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java index 056b7ac5a0..177af64bad 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/TaskManager.java @@ -26,16 +26,17 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.concurrent.PausableScheduledThreadPool; import org.apache.hugegraph.type.define.NodeRole; -import org.apache.hugegraph.util.*; import org.apache.hugegraph.util.Consumers; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.ExecutorUtil; import org.apache.hugegraph.util.LockUtil; +import org.apache.hugegraph.util.Log; import org.slf4j.Logger; -import org.apache.hugegraph.HugeException; -import org.apache.hugegraph.HugeGraphParams; -import org.apache.hugegraph.concurrent.PausableScheduledThreadPool; - public final class TaskManager { private static final Logger LOG = Log.logger(TaskManager.class); @@ -53,7 +54,7 @@ public final class TaskManager { public static final String DISTRIBUTED_TASK_SCHEDULER = "distributed-scheduler-%d"; protected static final long SCHEDULE_PERIOD = 1000L; // unit ms - + private static final long TX_CLOSE_TIMEOUT = 30L; // unit s private static 
final int THREADS = 4; private static final TaskManager MANAGER = new TaskManager(THREADS); @@ -184,7 +185,7 @@ private void closeTaskTx(HugeGraphParams graph) { graph.closeTx(); } else { Consumers.executeOncePerThread(this.taskExecutor, totalThreads, - graph::closeTx); + graph::closeTx, TX_CLOSE_TIMEOUT); } } catch (Exception e) { throw new HugeException("Exception when closing task tx", e); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java index f5415d9c51..194576e857 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/HugeTraverser.java @@ -17,6 +17,8 @@ package org.apache.hugegraph.traversal.algorithm; +import java.io.Closeable; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -37,6 +39,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Aggregate; import org.apache.hugegraph.backend.query.ConditionQuery; +import org.apache.hugegraph.backend.query.EdgesQueryIterator; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.query.QueryResults; import org.apache.hugegraph.backend.tx.GraphTransaction; @@ -66,6 +69,7 @@ import org.apache.hugegraph.util.collection.ObjectIntMapping; import org.apache.hugegraph.util.collection.ObjectIntMappingFactory; import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; import org.slf4j.Logger; import com.google.common.collect.ImmutableList; @@ -465,6 +469,13 @@ private Iterator edgesOfVertex(Id source, EdgeStep edgeStep, return edgeStep.skipSuperNodeIfNeeded(edges); } + public EdgesIterator 
edgesOfVertices(Iterator sources, + Directions dir, + List labelIds, + long degree) { + return new EdgesIterator(new EdgesQueryIterator(sources, dir, labelIds, degree)); + } + public Iterator edgesOfVertex(Id source, Steps steps) { List edgeLabels = steps.edgeLabels(); ConditionQuery cq = GraphTransaction.constructEdgesQuery( @@ -474,6 +485,11 @@ public Iterator edgesOfVertex(Id source, Steps steps) { cq.limit(steps.limit()); } + if (steps.isEdgeEmpty()) { + Iterator edges = this.graph().edges(cq); + return edgesOfVertexStep(edges, steps); + } + Map edgeConditions = getFilterQueryConditions(steps.edgeSteps(), HugeType.EDGE); @@ -1004,4 +1020,33 @@ public Set getEdges(Iterator vertexIter) { return edges; } } + + public class EdgesIterator implements Iterator>, Closeable { + + private final Iterator> currentIter; + + public EdgesIterator(EdgesQueryIterator queries) { + List> iteratorList = new ArrayList<>(); + while (queries.hasNext()) { + Iterator edges = graph.edges(queries.next()); + iteratorList.add(edges); + } + this.currentIter = iteratorList.iterator(); + } + + @Override + public boolean hasNext() { + return this.currentIter.hasNext(); + } + + @Override + public Iterator next() { + return this.currentIter.next(); + } + + @Override + public void close() throws IOException { + CloseableIterator.closeIterator(currentIter); + } + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java index 9f16f480b2..565d0af5f6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KneighborTraverser.java @@ -17,11 +17,11 @@ package org.apache.hugegraph.traversal.algorithm; -import java.util.Iterator; import java.util.Set; import 
java.util.function.Consumer; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.KneighborRecords; @@ -48,25 +48,27 @@ public Set kneighbor(Id sourceV, Directions dir, Id labelId = this.getEdgeLabelId(label); - Set latest = newSet(); - Set all = newSet(); + KneighborRecords records = new KneighborRecords(true, sourceV, true); - latest.add(sourceV); - this.vertexIterCounter.addAndGet(1L); + Consumer consumer = edgeId -> { + if (this.reachLimit(limit, records.size())) { + return; + } + records.addPath(edgeId.ownerVertexId(), edgeId.otherVertexId()); + }; while (depth-- > 0) { - long remaining = limit == NO_LIMIT ? NO_LIMIT : limit - all.size(); - latest = this.adjacentVertices(sourceV, latest, dir, labelId, - all, degree, remaining); - all.addAll(latest); - this.vertexIterCounter.addAndGet(1L); - this.edgeIterCounter.addAndGet(latest.size()); - if (reachLimit(limit, all.size())) { + records.startOneLayer(true); + traverseIdsByBfs(records.keys(), dir, labelId, degree, NO_LIMIT, consumer); + records.finishOneLayer(); + if (reachLimit(limit, records.size())) { break; } } - return all; + this.vertexIterCounter.addAndGet(records.size()); + + return records.idsBySet(limit); } public KneighborRecords customizedKneighbor(Id source, Steps steps, @@ -76,33 +78,29 @@ public KneighborRecords customizedKneighbor(Id source, Steps steps, checkPositive(maxDepth, "k-neighbor max_depth"); checkLimit(limit); - boolean concurrent = maxDepth >= this.concurrentDepth(); - - KneighborRecords records = new KneighborRecords(concurrent, + KneighborRecords records = new KneighborRecords(true, source, true); - Consumer consumer = v -> { + Consumer consumer = edge -> { if (this.reachLimit(limit, records.size())) { return; } - Iterator edges = edgesOfVertex(v, steps); - this.vertexIterCounter.addAndGet(1L); - while 
(!this.reachLimit(limit, records.size()) && edges.hasNext()) { - HugeEdge edge = (HugeEdge) edges.next(); - Id target = edge.id().otherVertexId(); - records.addPath(v, target); - - records.edgeResults().addEdge(v, target, edge); - - this.edgeIterCounter.addAndGet(1L); - } + EdgeId edgeId = ((HugeEdge) edge).id(); + records.addPath(edgeId.ownerVertexId(), edgeId.otherVertexId()); + records.edgeResults().addEdge(edgeId.ownerVertexId(), edgeId.otherVertexId(), edge); }; while (maxDepth-- > 0) { records.startOneLayer(true); - traverseIds(records.keys(), consumer, concurrent); + traverseIdsByBfs(records.keys(), steps, NO_LIMIT, consumer); records.finishOneLayer(); + if (this.reachLimit(limit, records.size())) { + break; + } } + + this.vertexIterCounter.addAndGet(records.size()); + return records; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java index 9924c766c5..c683694c14 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/KoutTraverser.java @@ -18,12 +18,15 @@ package org.apache.hugegraph.traversal.algorithm; import java.util.Iterator; +import java.util.List; import java.util.Set; import java.util.function.Consumer; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.traversal.algorithm.records.KoutRecords; import org.apache.hugegraph.traversal.algorithm.steps.Steps; @@ -57,34 +60,45 @@ public Set kout(Id sourceV, Directions dir, String label, Id labelId = this.getEdgeLabelId(label); - Set 
latest = newIdSet(); - latest.add(sourceV); + Set sources = newIdSet(); + Set neighbors = newIdSet(); + Set visited = nearest ? newIdSet() : null; - Set all = newIdSet(); - all.add(sourceV); + neighbors.add(sourceV); + + ConcurrentVerticesConsumer consumer; + + long remaining = capacity == NO_LIMIT ? NO_LIMIT : capacity - 1; - long remaining = capacity == NO_LIMIT ? - NO_LIMIT : capacity - latest.size(); - this.vertexIterCounter.addAndGet(1L); while (depth-- > 0) { // Just get limit nodes in last layer if limit < remaining capacity if (depth == 0 && limit != NO_LIMIT && (limit < remaining || remaining == NO_LIMIT)) { remaining = limit; } - if (nearest) { - latest = this.adjacentVertices(sourceV, latest, dir, labelId, - all, degree, remaining); - all.addAll(latest); - } else { - latest = this.adjacentVertices(sourceV, latest, dir, labelId, - null, degree, remaining); + + if (visited != null) { + visited.addAll(neighbors); } - this.vertexIterCounter.addAndGet(1L); - this.edgeIterCounter.addAndGet(latest.size()); + + // swap sources and neighbors + Set tmp = neighbors; + neighbors = sources; + sources = tmp; + + // start + consumer = new ConcurrentVerticesConsumer(sourceV, visited, remaining, neighbors); + + this.vertexIterCounter.addAndGet(sources.size()); + this.edgeIterCounter.addAndGet(neighbors.size()); + + traverseIdsByBfs(sources.iterator(), dir, labelId, degree, capacity, consumer); + + sources.clear(); + if (capacity != NO_LIMIT) { // Update 'remaining' value to record remaining capacity - remaining -= latest.size(); + remaining -= neighbors.size(); if (remaining <= 0 && depth > 0) { throw new HugeException( @@ -94,7 +108,7 @@ public Set kout(Id sourceV, Directions dir, String label, } } - return latest; + return neighbors; } public KoutRecords customizedKout(Id source, Steps steps, @@ -107,33 +121,25 @@ public KoutRecords customizedKout(Id source, Steps steps, checkLimit(limit); long[] depth = new long[1]; depth[0] = maxDepth; - boolean concurrent = maxDepth 
>= this.concurrentDepth(); - KoutRecords records = new KoutRecords(concurrent, source, nearest, 0); + KoutRecords records = new KoutRecords(true, source, nearest, 0); - Consumer consumer = v -> { + Consumer consumer = edge -> { if (this.reachLimit(limit, depth[0], records.size())) { return; } - Iterator edges = edgesOfVertex(v, steps); - this.vertexIterCounter.addAndGet(1L); - while (!this.reachLimit(limit, depth[0], records.size()) && - edges.hasNext()) { - HugeEdge edge = (HugeEdge) edges.next(); - Id target = edge.id().otherVertexId(); - records.addPath(v, target); - this.checkCapacity(capacity, records.accessed(), depth[0]); - - records.edgeResults().addEdge(v, target, edge); - - this.edgeIterCounter.addAndGet(1L); - } + EdgeId edgeId = ((HugeEdge) edge).id(); + records.addPath(edgeId.ownerVertexId(), edgeId.otherVertexId()); + records.edgeResults().addEdge(edgeId.ownerVertexId(), edgeId.otherVertexId(), edge); }; while (depth[0]-- > 0) { + List sources = records.ids(Query.NO_LIMIT); records.startOneLayer(true); - this.traverseIds(records.keys(), consumer, concurrent); + traverseIdsByBfs(sources.iterator(), steps, capacity, consumer); + this.vertexIterCounter.addAndGet(sources.size()); records.finishOneLayer(); + checkCapacity(capacity, records.accessed(), depth[0]); } return records; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java index b05de24228..c05d8f89f4 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/OltpTraverser.java @@ -17,24 +17,36 @@ package org.apache.hugegraph.traversal.algorithm; +import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Set; import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import com.google.common.base.Objects; import org.apache.commons.lang3.tuple.Pair; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.query.EdgesQueryIterator; import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.iterator.FilterIterator; +import org.apache.hugegraph.iterator.MapperIterator; +import org.apache.hugegraph.structure.HugeEdge; +import org.apache.hugegraph.traversal.algorithm.steps.Steps; +import org.apache.hugegraph.type.define.Directions; import org.apache.hugegraph.util.Consumers; +import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Element; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.util.CloseableIterator; -import org.apache.hugegraph.iterator.FilterIterator; +import com.google.common.base.Objects; public abstract class OltpTraverser extends HugeTraverser implements AutoCloseable { @@ -75,7 +87,7 @@ public static void destroy() { protected long traversePairs(Iterator> pairs, Consumer> consumer) { - return this.traverse(pairs, consumer, "traverse-pairs"); + return this.traverseByOne(pairs, consumer, "traverse-pairs"); } protected long traverseIds(Iterator ids, Consumer consumer, @@ -93,18 +105,19 @@ protected long traverseIds(Iterator ids, Consumer consumer, } protected long traverseIds(Iterator ids, Consumer consumer) { - return this.traverse(ids, consumer, "traverse-ids"); + return this.traverseByOne(ids, consumer, "traverse-ids"); } - protected long traverse(Iterator iterator, Consumer 
consumer, - String name) { + protected long traverseByOne(Iterator iterator, + Consumer consumer, + String taskName) { if (!iterator.hasNext()) { return 0L; } Consumers consumers = new Consumers<>(executors.getExecutor(), consumer, null); - consumers.start(name); + consumers.start(taskName); long total = 0L; try { while (iterator.hasNext()) { @@ -129,11 +142,101 @@ protected long traverse(Iterator iterator, Consumer consumer, return total; } + protected void traverseIdsByBfs(Iterator vertices, + Directions dir, + Id label, + long degree, + long capacity, + Consumer consumer) { + List labels = label == null ? Collections.emptyList() : + Collections.singletonList(label); + OneStepEdgeIterConsumer edgeIterConsumer = new OneStepEdgeIterConsumer(consumer, capacity); + + EdgesIterator edgeIter = edgesOfVertices(vertices, dir, labels, degree); + + // parallel out-of-order execution + this.traverseByBatch(edgeIter, edgeIterConsumer, "traverse-bfs-step", 1); + } + + protected void traverseIdsByBfs(Iterator vertices, + Steps steps, + long capacity, + Consumer consumer) { + StepsEdgeIterConsumer edgeIterConsumer = + new StepsEdgeIterConsumer(consumer, capacity, steps); + + EdgesQueryIterator queryIterator = new EdgesQueryIterator(vertices, + steps.direction(), + steps.edgeLabels(), + steps.degree()); + + // get Iterator> from Iterator + EdgesIterator edgeIter = new EdgesIterator(queryIterator); + + // parallel out-of-order execution + this.traverseByBatch(edgeIter, edgeIterConsumer, "traverse-bfs-steps", 1); + } + + protected long traverseByBatch(Iterator> sources, + Consumer> consumer, + String taskName, int concurrentWorkers) { + if (!sources.hasNext()) { + return 0L; + } + AtomicBoolean done = new AtomicBoolean(false); + Consumers> consumers = null; + try { + consumers = buildConsumers(consumer, concurrentWorkers, done, + executors.getExecutor()); + return startConsumers(sources, taskName, done, consumers); + } finally { + assert consumers != null; + 
executors.returnExecutor(consumers.executor()); + } + } + + private long startConsumers(Iterator> sources, + String taskName, + AtomicBoolean done, + Consumers> consumers) { + long total = 0L; + try { + consumers.start(taskName); + while (sources.hasNext() && !done.get()) { + total++; + Iterator v = sources.next(); + consumers.provide(v); + } + } catch (Consumers.StopExecution e) { + // pass + } catch (Throwable e) { + throw Consumers.wrapException(e); + } finally { + try { + consumers.await(); + } catch (Throwable e) { + throw Consumers.wrapException(e); + } finally { + CloseableIterator.closeIterator(sources); + } + } + return total; + } + + private Consumers> buildConsumers(Consumer> consumer, + int queueSizePerWorker, + AtomicBoolean done, + ExecutorService executor) { + return new Consumers<>(executor, + consumer, + null, + e -> done.set(true), + queueSizePerWorker); + } + protected Iterator filter(Iterator vertices, String key, Object value) { - return new FilterIterator<>(vertices, vertex -> { - return match(vertex, key, value); - }); + return new FilterIterator<>(vertices, vertex -> match(vertex, key, value)); } protected boolean match(Element elem, String key, Object value) { @@ -175,4 +278,104 @@ public List getValues(K key) { return values; } } + + public static class ConcurrentVerticesConsumer implements Consumer { + + private final Id sourceV; + private final Set excluded; + private final Set neighbors; + private final long limit; + private final AtomicInteger count; + + public ConcurrentVerticesConsumer(Id sourceV, Set excluded, long limit, + Set neighbors) { + this.sourceV = sourceV; + this.excluded = excluded; + this.limit = limit; + this.neighbors = neighbors; + this.count = new AtomicInteger(0); + } + + @Override + public void accept(EdgeId edgeId) { + if (this.limit != NO_LIMIT && count.get() >= this.limit) { + throw new Consumers.StopExecution("reach limit"); + } + + Id targetV = edgeId.otherVertexId(); + if (this.sourceV.equals(targetV)) { + 
return; + } + + if (this.excluded != null && this.excluded.contains(targetV)) { + return; + } + + if (this.neighbors.add(targetV)) { + if (this.limit != NO_LIMIT) { + this.count.getAndIncrement(); + } + } + } + } + + public abstract class EdgesConsumer implements Consumer> { + + private final Consumer consumer; + private final long capacity; + + public EdgesConsumer(Consumer consumer, long capacity) { + this.consumer = consumer; + this.capacity = capacity; + } + + protected abstract Iterator prepare(Iterator iter); + + @Override + public void accept(Iterator edgeIter) { + Iterator ids = prepare(edgeIter); + long counter = 0; + while (ids.hasNext()) { + if (Thread.currentThread().isInterrupted()) { + LOG.warn("Consumer is Interrupted"); + break; + } + counter++; + this.consumer.accept(ids.next()); + } + long total = edgeIterCounter.addAndGet(counter); + // traverse by batch & improve performance + if (this.capacity != NO_LIMIT && total >= this.capacity) { + throw new Consumers.StopExecution("reach capacity"); + } + } + } + + public class OneStepEdgeIterConsumer extends EdgesConsumer { + + public OneStepEdgeIterConsumer(Consumer consumer, long capacity) { + super(consumer, capacity); + } + + @Override + protected Iterator prepare(Iterator edgeIter) { + return new MapperIterator<>(edgeIter, (e) -> ((HugeEdge) e).id()); + } + } + + public class StepsEdgeIterConsumer extends EdgesConsumer { + + private final Steps steps; + + public StepsEdgeIterConsumer(Consumer consumer, long capacity, + Steps steps) { + super(consumer, capacity); + this.steps = steps; + } + + @Override + protected Iterator prepare(Iterator edgeIter) { + return edgesOfVertexStep(edgeIter, this.steps); + } + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KneighborRecords.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KneighborRecords.java index 7e04a286c3..649b1c2116 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KneighborRecords.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/records/KneighborRecords.java @@ -19,7 +19,9 @@ import static org.apache.hugegraph.traversal.algorithm.HugeTraverser.NO_LIMIT; +import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.Stack; import org.apache.hugegraph.backend.id.Id; @@ -45,6 +47,17 @@ public int size() { @Override public List ids(long limit) { List ids = CollectionFactory.newList(CollectionType.EC); + this.getRecords(limit, ids); + return ids; + } + + public Set idsBySet(long limit) { + Set ids = CollectionFactory.newSet(CollectionType.EC); + this.getRecords(limit, ids); + return ids; + } + + private void getRecords(long limit, Collection ids) { Stack records = this.records(); // Not include record(i=0) to ignore source vertex for (int i = 1; i < records.size(); i++) { @@ -54,7 +67,6 @@ public List ids(long limit) { limit--; } } - return ids; } @Override diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java index d1a9238be1..c2a1a7e1e1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/algorithm/steps/Steps.java @@ -138,6 +138,10 @@ public List edgeLabels() { return new ArrayList<>(this.edgeSteps.keySet()); } + public boolean isEdgeEmpty() { + return this.edgeSteps.isEmpty(); + } + public boolean isVertexEmpty() { return this.vertexSteps.isEmpty(); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java index 
00689e0c5e..06e678fd98 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/util/Consumers.java @@ -27,16 +27,16 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import org.apache.hugegraph.config.CoreOptions; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.task.TaskManager.ContextCallable; +import org.slf4j.Logger; public final class Consumers { @@ -46,16 +46,16 @@ public final class Consumers { private static final Logger LOG = Log.logger(Consumers.class); + private final V QUEUE_END = (V) new Object(); private final ExecutorService executor; private final Consumer consumer; - private final Runnable done; - + private final Runnable doneHandle; + private final Consumer exceptionHandle; private final int workers; + private final List runningFutures; private final int queueSize; private final CountDownLatch latch; private final BlockingQueue queue; - - private volatile boolean ending = false; private volatile Throwable exception = null; public Consumers(ExecutorService executor, Consumer consumer) { @@ -63,23 +63,40 @@ public Consumers(ExecutorService executor, Consumer consumer) { } public Consumers(ExecutorService executor, - Consumer consumer, Runnable done) { + Consumer consumer, Runnable doneHandle) { + this(executor, consumer, doneHandle, QUEUE_WORKER_SIZE); + } + + public Consumers(ExecutorService executor, + Consumer consumer, + Runnable doneHandle, + int queueSizePerWorker) { + this(executor, consumer, doneHandle, null, queueSizePerWorker); + } + 
+ public Consumers(ExecutorService executor, + Consumer consumer, + Runnable doneHandle, + Consumer exceptionHandle, + int queueSizePerWorker) { this.executor = executor; this.consumer = consumer; - this.done = done; + this.doneHandle = doneHandle; + this.exceptionHandle = exceptionHandle; int workers = THREADS; if (this.executor instanceof ThreadPoolExecutor) { workers = ((ThreadPoolExecutor) this.executor).getCorePoolSize(); } this.workers = workers; - this.queueSize = QUEUE_WORKER_SIZE * workers; + + this.runningFutures = new ArrayList<>(workers); + this.queueSize = queueSizePerWorker * workers + 1; this.latch = new CountDownLatch(workers); this.queue = new ArrayBlockingQueue<>(this.queueSize); } public void start(String name) { - this.ending = false; this.exception = null; if (this.executor == null) { return; @@ -87,7 +104,8 @@ public void start(String name) { LOG.info("Starting {} workers[{}] with queue size {}...", this.workers, name, this.queueSize); for (int i = 0; i < this.workers; i++) { - this.executor.submit(new ContextCallable<>(this::runAndDone)); + this.runningFutures.add( + this.executor.submit(new ContextCallable<>(this::runAndDone))); } } @@ -95,11 +113,15 @@ private Void runAndDone() { try { this.run(); } catch (Throwable e) { - // Only the first exception of one thread can be stored - this.exception = e; - if (!(e instanceof StopExecution)) { + if (e instanceof StopExecution) { + this.queue.clear(); + putQueueEnd(); + } else { + // Only the first exception to one thread can be stored + this.exception = e; LOG.error("Error when running task", e); } + exceptionHandle(e); } finally { this.done(); this.latch.countDown(); @@ -109,11 +131,7 @@ private Void runAndDone() { private void run() { LOG.debug("Start to work..."); - while (!this.ending) { - this.consume(); - } - assert this.ending; - while (this.consume()){ + while (this.consume()) { // ignore } @@ -121,14 +139,18 @@ private void run() { } private boolean consume() { - V elem; - try { - elem = 
this.queue.poll(CONSUMER_WAKE_PERIOD, TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - // ignore - return true; + V elem = null; + while (elem == null) { + try { + elem = this.queue.poll(CONSUMER_WAKE_PERIOD, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + // ignore + return false; + } } - if (elem == null) { + + if (elem == QUEUE_END) { + putQueueEnd(); return false; } // do job @@ -136,13 +158,29 @@ private boolean consume() { return true; } + private void exceptionHandle(Throwable e) { + if (this.exceptionHandle == null) { + return; + } + + try { + this.exceptionHandle.accept(e); + } catch (Throwable ex) { + if (this.exception == null) { + this.exception = ex; + } else { + LOG.warn("Error while calling exceptionHandle()", ex); + } + } + } + private void done() { - if (this.done == null) { + if (this.doneHandle == null) { return; } try { - this.done.run(); + this.doneHandle.run(); } catch (Throwable e) { if (this.exception == null) { this.exception = e; @@ -169,6 +207,16 @@ public void provide(V v) throws Throwable { } else { try { this.queue.put(v); + } catch (InterruptedException e) { + LOG.warn("Interrupt while queuing QUEUE_END", e); + } + } + } + + private void putQueueEnd() { + if (this.executor != null) { + try { + this.queue.put(QUEUE_END); } catch (InterruptedException e) { LOG.warn("Interrupted while enqueue", e); } @@ -176,15 +224,18 @@ public void provide(V v) throws Throwable { } public void await() throws Throwable { - this.ending = true; if (this.executor == null) { // call done() directly if without thread pool this.done(); } else { try { + putQueueEnd(); this.latch.await(); } catch (InterruptedException e) { String error = "Interrupted while waiting for consumers"; + for (Future f : this.runningFutures) { + f.cancel(true); + } this.exception = new HugeException(error, e); LOG.warn(error, e); } @@ -201,7 +252,8 @@ public ExecutorService executor() { public static void executeOncePerThread(ExecutorService executor, int 
totalThreads, - Runnable callback) + Runnable callback, + long invokeTimeout) throws InterruptedException { // Ensure callback execute at least once for every thread final Map threadsTimes = new ConcurrentHashMap<>(); @@ -230,7 +282,7 @@ public static void executeOncePerThread(ExecutorService executor, for (int i = 0; i < totalThreads; i++) { tasks.add(task); } - executor.invokeAll(tasks); + executor.invokeAll(tasks, invokeTimeout, TimeUnit.SECONDS); } public static ExecutorService newThreadPool(String prefix, int workers) { @@ -290,13 +342,21 @@ public synchronized ExecutorService getExecutor() { public synchronized void returnExecutor(ExecutorService executor) { E.checkNotNull(executor, "executor"); if (!this.executors.offer(executor)) { - executor.shutdown(); + try { + executor.shutdown(); + } catch (Exception e) { + LOG.warn("close ExecutorService with error:", e); + } } } public synchronized void destroy() { for (ExecutorService executor : this.executors) { - executor.shutdown(); + try { + executor.shutdownNow(); + } catch (Exception e) { + LOG.warn("close ExecutorService with error:", e); + } } this.executors.clear(); } diff --git a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java index 1d0cdba7b6..ca1058b9ac 100644 --- a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java +++ b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java @@ -44,9 +44,6 @@ import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; -import org.rocksdb.RocksDBException; -import org.slf4j.Logger; - import org.apache.hugegraph.HugeException; import org.apache.hugegraph.backend.BackendException; import org.apache.hugegraph.backend.id.Id; @@ -69,6 +66,9 @@ import 
org.apache.hugegraph.util.ExecutorUtil; import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.Log; +import org.rocksdb.RocksDBException; +import org.slf4j.Logger; + import com.google.common.collect.ImmutableList; public abstract class RocksDBStore extends AbstractBackendStore { @@ -93,7 +93,8 @@ public abstract class RocksDBStore extends AbstractBackendStore Date: Wed, 25 Oct 2023 04:25:07 -0500 Subject: [PATCH 18/24] fix(core): handle schema Cache expandCapacity concurrent problem (#2332) --- .../backend/store/ram/IntObjectMap.java | 9 +-- .../apache/hugegraph/unit/UnitTestSuite.java | 6 +- .../unit/store/RamIntObjectMapTest.java | 72 +++++++++++++++++++ 3 files changed, 82 insertions(+), 5 deletions(-) create mode 100644 hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/IntObjectMap.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/IntObjectMap.java index 78af531a07..735f423ce8 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/IntObjectMap.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/ram/IntObjectMap.java @@ -29,8 +29,8 @@ public final class IntObjectMap implements RamMap { private static final float DEFAULT_INITIAL_FACTOR = 0.25f; private final int maxSize; - private int currentSize; - private Object[] array; + private volatile int currentSize; + private volatile Object[] array; public IntObjectMap(int size) { this.maxSize = size; @@ -79,10 +79,11 @@ private synchronized void expandCapacity() { if (this.currentSize == this.maxSize) { return; } - this.currentSize = Math.min(this.currentSize * 2, this.maxSize); - Object[] newArray = new Object[this.currentSize]; + int newSize = Math.min(this.currentSize * 2, this.maxSize); + Object[] newArray = new Object[newSize]; 
System.arraycopy(this.array, 0, newArray, 0, this.array.length); this.clear(); this.array = newArray; + this.currentSize = newSize; } } diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/unit/UnitTestSuite.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/unit/UnitTestSuite.java index 3b8071f9f1..d72269a4f5 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/unit/UnitTestSuite.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/unit/UnitTestSuite.java @@ -27,6 +27,7 @@ import org.apache.hugegraph.unit.rocksdb.RocksDBCountersTest; import org.apache.hugegraph.unit.rocksdb.RocksDBSessionTest; import org.apache.hugegraph.unit.rocksdb.RocksDBSessionsTest; +import org.apache.hugegraph.unit.store.RamIntObjectMapTest; import org.junit.runner.RunWith; import org.junit.runners.Suite; @@ -148,7 +149,10 @@ Int2IntsMapTest.class, IdSetTest.class, IntMapTest.class, - IntSetTest.class + IntSetTest.class, + + /* store */ + RamIntObjectMapTest.class }) public class UnitTestSuite { } diff --git a/hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java b/hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java new file mode 100644 index 0000000000..4e9fe4d95b --- /dev/null +++ b/hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.unit.store; + +import java.util.concurrent.CountDownLatch; + +import org.apache.hugegraph.backend.store.ram.IntObjectMap; +import org.junit.Assert; +import org.junit.Test; + +public class RamIntObjectMapTest { + + @Test + public void testConcurrency() { + int size = 32; + IntObjectMap map = new IntObjectMap<>(size); + + final int numThreads = 10; + final CountDownLatch startSignal = new CountDownLatch(1); + final CountDownLatch doneSignal = new CountDownLatch(numThreads); + + for (int i = 0; i < numThreads; i++) { + new Thread(() -> { + try { + startSignal.await(); + } catch (InterruptedException e) { + e.printStackTrace(); + Assert.fail(e.getMessage()); + } + + for (int j = 0; j < size; j++) { + map.set(j, j); + } + + doneSignal.countDown(); + }).start(); + } + + startSignal.countDown(); + + try { + doneSignal.await(); + } catch (InterruptedException e) { + e.printStackTrace(); + Assert.fail(e.getMessage()); + } + + for (int i = 0; i < numThreads; i++) { + new Thread(() -> { + for (int j = 0; j < size; j++) { + Integer value = map.get(j); + Assert.assertNotNull(value); + } + }).start(); + } + } +} From 78c4af5514c8a210a615e2f66f52191c535fa5c1 Mon Sep 17 00:00:00 2001 From: VGalaxies Date: Fri, 10 Nov 2023 11:58:25 +0800 Subject: [PATCH 19/24] chore: move RamIntObjectMapTest.java --- .../java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {hugegraph-test => 
hugegraph-server/hugegraph-test}/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java (100%) diff --git a/hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java similarity index 100% rename from hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java rename to hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/unit/store/RamIntObjectMapTest.java From 5b8ac6fa3aeea01724e5010497c162f548fb7551 Mon Sep 17 00:00:00 2001 From: lzyxx <94185075+lzyxx77@users.noreply.github.com> Date: Sat, 28 Oct 2023 23:58:17 +0800 Subject: [PATCH 20/24] feat(cassandra): adapt cassandra from 3.11.12 to 4.0.10 (#2300) --- hugegraph-server/hugegraph-cassandra/pom.xml | 2 +- .../store/cassandra/CassandraMetrics.java | 12 ++--- .../store/cassandra/CassandraShard.java | 4 +- .../hugegraph-dist/release-docs/LICENSE | 2 +- .../scripts/dependency/known-dependencies.txt | 50 +++++++++++-------- .../src/assembly/travis/install-cassandra.sh | 2 +- 6 files changed, 41 insertions(+), 31 deletions(-) diff --git a/hugegraph-server/hugegraph-cassandra/pom.xml b/hugegraph-server/hugegraph-cassandra/pom.xml index 5b2c4152b6..888f6dd7f8 100644 --- a/hugegraph-server/hugegraph-cassandra/pom.xml +++ b/hugegraph-server/hugegraph-cassandra/pom.xml @@ -37,7 +37,7 @@ org.apache.cassandra cassandra-all - 3.11.12 + 4.0.10 org.slf4j diff --git a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraMetrics.java b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraMetrics.java index 8759346764..f2f2931c62 100644 --- a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraMetrics.java +++ 
b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraMetrics.java @@ -112,7 +112,7 @@ protected Map getMetricsByHost(String host) { metrics.put(DISK_USAGE, UnitUtil.bytesToGB(diskSize)); metrics.put(DISK_USAGE + READABLE, UnitUtil.bytesToReadableString(diskSize)); - metrics.put(DISK_USAGE + "_details", probe.getLoadMap()); + metrics.put(DISK_USAGE + "_details", probe.getLoadMap(false)); metrics.put(DISK_UNIT, "GB"); // Uptime Metrics @@ -125,11 +125,11 @@ protected Map getMetricsByHost(String host) { this.appendExtraMetrics(metrics, probe); // Nodes Metrics - metrics.put("live_nodes", probe.getLiveNodes()); - metrics.put("joining_nodes", probe.getJoiningNodes()); - metrics.put("moving_nodes", probe.getMovingNodes()); - metrics.put("leaving_nodes", probe.getLeavingNodes()); - metrics.put("unreachable_nodes", probe.getUnreachableNodes()); + metrics.put("live_nodes", probe.getLiveNodes(false)); + metrics.put("joining_nodes", probe.getJoiningNodes(false)); + metrics.put("moving_nodes", probe.getMovingNodes(false)); + metrics.put("leaving_nodes", probe.getLeavingNodes(false)); + metrics.put("unreachable_nodes", probe.getUnreachableNodes(false)); // Others metrics.put("keyspaces", probe.getKeyspaces()); diff --git a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java index c5734f62e7..9bcefb6aa4 100644 --- a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java +++ b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java @@ -32,7 +32,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import org.apache.cassandra.config.SchemaConstants; +import org.apache.cassandra.schema.SchemaConstants; 
import org.apache.cassandra.db.SystemKeyspace; import org.apache.cassandra.dht.ByteOrderedPartitioner; import org.apache.cassandra.dht.IPartitioner; @@ -222,7 +222,7 @@ private static Map describeSplits( "WHERE keyspace_name = ? AND table_name = ? AND " + "range_start = ? AND range_end = ?", SchemaConstants.SYSTEM_KEYSPACE_NAME, - SystemKeyspace.SIZE_ESTIMATES); + SystemKeyspace.LEGACY_SIZE_ESTIMATES); ResultSet resultSet = session.execute(query, keyspace, table, tokenRange.getStart().toString(), diff --git a/hugegraph-server/hugegraph-dist/release-docs/LICENSE b/hugegraph-server/hugegraph-dist/release-docs/LICENSE index 25c50c2fbb..b6306df6b5 100644 --- a/hugegraph-server/hugegraph-dist/release-docs/LICENSE +++ b/hugegraph-server/hugegraph-dist/release-docs/LICENSE @@ -252,7 +252,7 @@ See licenses/ for text of these licenses. (Apache License, Version 2.0) * jersey-media-json-jackson (org.glassfish.jersey.media:jersey-media-json-jackson:3.0.3 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-media-json-jackson) (Apache License, Version 2.0) * ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) (Apache License, Version 2.0) * Annotations for Metrics (io.dropwizard.metrics:metrics-annotation:4.2.4 - https://metrics.dropwizard.io/metrics-annotation) - (Apache License, Version 2.0) * Apache Cassandra (org.apache.cassandra:cassandra-all:3.11.12 - https://cassandra.apache.org) + (Apache License, Version 2.0) * Apache Cassandra (org.apache.cassandra:cassandra-all:4.0.10 - https://cassandra.apache.org) (Apache License, Version 2.0) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/) (Apache License, Version 2.0) * Apache Commons Codec (commons-codec:commons-codec:1.11 - http://commons.apache.org/proper/commons-codec/) (Apache License, Version 2.0) * Apache Commons Codec (commons-codec:commons-codec:1.15 - 
https://commons.apache.org/proper/commons-codec/) diff --git a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt index b5f5036617..f1388437c7 100644 --- a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt +++ b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt @@ -1,7 +1,7 @@ HdrHistogram-2.1.9.jar ST4-4.0.8.jar accessors-smart-1.2.jar -airline-0.6.jar +airline-0.8.jar animal-sniffer-annotations-1.14.jar annotations-4.1.1.4.jar ansj_seg-5.1.6.jar @@ -9,8 +9,8 @@ antlr-runtime-3.5.2.jar aopalliance-repackaged-3.0.1.jar arthas-agent-attach-3.7.1.jar arthas-packaging-3.7.1.jar -asm-5.0.4.jar asm-6.0.jar +asm-7.1.jar asm-analysis-5.0.3.jar asm-commons-5.0.3.jar asm-tree-5.0.3.jar @@ -21,12 +21,17 @@ bolt-1.6.4.jar byte-buddy-1.10.5.jar byte-buddy-agent-1.10.5.jar byte-buddy-agent-1.11.6.jar -caffeine-2.2.6.jar caffeine-2.3.1.jar -cassandra-all-3.11.12.jar +caffeine-2.5.6.jar +cassandra-all-4.0.10.jar cassandra-driver-core-3.6.0.jar checker-qual-2.0.0.jar checker-qual-3.5.0.jar +chronicle-bytes-2.20.111.jar +chronicle-core-2.20.126.jar +chronicle-queue-5.20.123.jar +chronicle-threads-2.20.111.jar +chronicle-wire-2.20.117.jar classgraph-4.8.95.jar commons-beanutils-1.9.4.jar commons-cli-1.1.jar @@ -40,15 +45,12 @@ commons-configuration-1.10.jar commons-configuration2-2.8.0.jar commons-io-2.7.jar commons-lang-2.6.jar -commons-lang3-3.1.jar commons-lang3-3.11.jar commons-logging-1.1.1.jar commons-logging-1.2.jar commons-math3-3.2.jar commons-text-1.10.0.jar -compress-lzf-0.8.4.jar concurrent-trees-2.4.0.jar -concurrentlinkedhashmap-lru-1.4.jar cypher-gremlin-extensions-1.0.4.jar disruptor-3.3.7.jar eclipse-collections-11.1.0.jar @@ -98,18 +100,20 @@ hk2-api-3.0.1.jar hk2-locator-3.0.1.jar hk2-utils-3.0.1.jar hppc-0.7.1.jar +hppc-0.8.1.jar htrace-core4-4.2.0-incubating.jar httpclient-4.5.13.jar httpcore-4.4.13.jar 
ikanalyzer-2012_u6.jar ivy-2.4.0.jar j2objc-annotations-1.1.jar -jackson-annotations-2.12.5.jar +j2objc-annotations-1.3.jar +jackson-annotations-2.13.2.jar jackson-annotations-2.14.0-rc1.jar -jackson-core-2.12.5.jar +jackson-core-2.13.2.jar jackson-core-2.14.0-rc1.jar jackson-databind-2.12.1.jar -jackson-databind-2.12.5.jar +jackson-databind-2.13.2.2.jar jackson-databind-2.14.0-rc1.jar jackson-dataformat-yaml-2.9.3.jar jackson-datatype-jsr310-2.12.1.jar @@ -125,7 +129,9 @@ jakarta.servlet-api-5.0.0.jar jakarta.validation-api-3.0.0.jar jakarta.ws.rs-api-3.0.0.jar jakarta.xml.bind-api-4.0.0-RC2.jar -jamm-0.3.0.jar +jamm-0.3.2.jar +java-cup-runtime-11b-20160615.jar +jcommander-1.30.jar javapoet-1.8.0.jar javassist-3.21.0-GA.jar javatuples-1.2.jar @@ -141,8 +147,8 @@ jcabi-manifests-1.1.jar jcip-annotations-1.0-1.jar jcl-over-slf4j-1.7.25.jar jcseg-core-2.6.2.jar -jctools-core-1.2.1.jar jctools-core-2.1.1.jar +jctools-core-3.1.0.jar jersey-apache-connector-3.0.3.jar jersey-client-3.0.3.jar jersey-common-3.0.3.jar @@ -159,7 +165,7 @@ jersey-test-framework-core-3.0.3.jar jersey-test-framework-provider-grizzly2-3.0.3.jar jffi-1.2.16-native.jar jffi-1.2.16.jar -jflex-1.6.0.jar +jflex-1.8.2.jar jieba-analysis-1.0.2.jar jjwt-api-0.11.5.jar jjwt-impl-0.11.5.jar @@ -169,7 +175,7 @@ jna-5.12.1.jar jnr-ffi-2.1.7.jar jnr-x86asm-1.0.2.jar joda-time-2.10.8.jar -joda-time-2.4.jar +jvm-attach-api-1.5.jar jraft-core-1.3.11.jar json-simple-1.1.jar json-smart-2.3.jar @@ -189,7 +195,6 @@ kerby-config-2.0.0.jar kerby-pkix-2.0.0.jar kerby-util-2.0.0.jar kerby-xdr-2.0.0.jar -libthrift-0.9.2.jar log4j-api-2.17.1.jar log4j-core-2.17.1.jar log4j-slf4j-impl-2.17.1.jar @@ -200,7 +205,6 @@ lucene-core-8.11.2.jar lucene-queries-4.7.2.jar lucene-queryparser-4.7.2.jar lucene-sandbox-4.7.2.jar -lz4-1.3.0.jar lz4-java-1.8.0.jar metrics-annotation-4.2.4.jar metrics-core-3.0.2.jar @@ -212,13 +216,15 @@ metrics-jvm-3.1.5.jar metrics-logback-3.1.5.jar mmseg4j-core-1.10.0.jar mockito-core-3.3.3.jar 
+mxdump-0.14.jar netty-all-4.1.44.Final.jar netty-all-4.1.61.Final.jar +netty-tcnative-boringssl-static-2.0.36.Final.jar nimbus-jose-jwt-4.41.2.jar nlp-lang-1.7.7.jar objenesis-2.6.jar ohc-core-0.7.4.jar -ohc-core-j8-0.4.4.jar +ohc-core-j8-0.5.1.jar opentracing-api-0.22.0.jar opentracing-mock-0.22.0.jar opentracing-noop-0.22.0.jar @@ -231,6 +237,7 @@ perfmark-api-0.25.0.jar picocli-4.3.2.jar postgresql-42.4.1.jar protobuf-java-3.21.7.jar +psjava-0.1.19.jar reporter-config-base-3.0.3.jar reporter-config3-3.0.3.jar rewriting-9.0-9.0.20190305.jar @@ -238,12 +245,15 @@ rocksdbjni-7.2.2.jar scala-java8-compat_2.12-0.8.0.jar scala-library-2.12.7.jar scala-reflect-2.12.7.jar +sjk-cli-0.14.jar +sjk-core-0.14.jar +sjk-json-0.14.jar +sjk-stacktrace-0.14.jar sigar-1.6.4.jar slf4j-api-1.7.25.jar -slf4j-api-1.7.7.jar snakeyaml-1.26.jar snakeyaml-1.27.jar -snappy-java-1.1.1.7.jar +snappy-java-1.1.2.6.jar snowball-stemmer-1.3.0.581.1.jar sofa-common-tools-1.0.12.jar sofa-rpc-all-5.7.6.jar @@ -257,7 +267,6 @@ swagger-integration-jakarta-2.1.9.jar swagger-jaxrs2-jakarta-2.1.9.jar swagger-models-1.5.18.jar swagger-models-jakarta-2.1.9.jar -thrift-server-0.3.7.jar tinkergraph-gremlin-3.5.1.jar token-provider-2.0.0.jar tracer-core-3.0.8.jar @@ -265,3 +274,4 @@ translation-1.0.4.jar util-9.0-9.0.20190305.jar validation-api-1.1.0.Final.jar zt-zip-1.14.jar +zstd-jni-1.5.5-1.jar diff --git a/hugegraph-server/hugegraph-dist/src/assembly/travis/install-cassandra.sh b/hugegraph-server/hugegraph-dist/src/assembly/travis/install-cassandra.sh index 367259d141..2bdfe0bf6a 100755 --- a/hugegraph-server/hugegraph-dist/src/assembly/travis/install-cassandra.sh +++ b/hugegraph-server/hugegraph-dist/src/assembly/travis/install-cassandra.sh @@ -19,7 +19,7 @@ set -ev TRAVIS_DIR=`dirname $0` CASS_DOWNLOAD_ADDRESS="http://archive.apache.org/dist/cassandra" -CASS_VERSION="3.10" +CASS_VERSION="4.0.10" CASS_PACKAGE="apache-cassandra-${CASS_VERSION}" CASS_TAR="${CASS_PACKAGE}-bin.tar.gz" 
CASS_CONF="${CASS_PACKAGE}/conf/cassandra.yaml" From 2cd1269130cc0033d96a7a24aa164524fec26052 Mon Sep 17 00:00:00 2001 From: SunnyBoy-WYH <48077841+SunnyBoy-WYH@users.noreply.github.com> Date: Mon, 6 Nov 2023 14:15:32 +0800 Subject: [PATCH 21/24] chore(api): add swagger desc for Arthas & Metric & Cypher & White API (#2337) add swagger belong for arthas API --------- Co-authored-by: imbajin --- .github/workflows/ci.yml | 6 ++-- hugegraph-server/hugegraph-api/pom.xml | 2 +- .../hugegraph/api/arthas/ArthasAPI.java | 5 ++++ .../hugegraph/api/cypher/CypherAPI.java | 3 ++ .../hugegraph/api/job/AlgorithmAPI.java | 3 ++ .../hugegraph/api/metrics/MetricsAPI.java | 11 +++++++ .../hugegraph/api/profile/WhiteIpListAPI.java | 6 ++++ .../scripts/dependency/known-dependencies.txt | 29 ++++++++++--------- hugegraph-server/pom.xml | 1 + 9 files changed, 49 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b13dc55b3d..3512586a32 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: HEAD_BRANCH_NAME: ${{ github.head_ref }} BASE_BRANCH_NAME: ${{ github.base_ref }} TARGET_BRANCH_NAME: ${{ github.base_ref != '' && github.base_ref || github.ref_name }} - RELEASE_BRANCH: ${{ startsWith(github.ref_name, 'release-') || startsWith(github.ref_name, 'test-') || startsWith(github.base_ref, 'release-') }} + RELEASE_BRANCH: ${{ startsWith(github.ref_name, 'release-') || startsWith(github.ref_name, 'test-') }} strategy: fail-fast: false @@ -43,7 +43,7 @@ jobs: restore-keys: ${{ runner.os }}-m2 - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 2 @@ -98,6 +98,6 @@ jobs: $TRAVIS_DIR/run-tinkerpop-test.sh $BACKEND tinkerpop - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3.0.0 + uses: codecov/codecov-action@v3 with: file: ${{ env.REPORT_DIR }}/*.xml diff --git a/hugegraph-server/hugegraph-api/pom.xml b/hugegraph-server/hugegraph-api/pom.xml index 
82e8032cc2..d365ce22f8 100644 --- a/hugegraph-server/hugegraph-api/pom.xml +++ b/hugegraph-server/hugegraph-api/pom.xml @@ -151,7 +151,7 @@ io.swagger.core.v3 swagger-jaxrs2-jakarta - 2.1.9 + ${swagger.version} diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java index 549f9de0a8..67e65a31a8 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/arthas/ArthasAPI.java @@ -27,6 +27,9 @@ import com.codahale.metrics.annotation.Timed; import com.taobao.arthas.agent.attach.ArthasAgent; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; + import jakarta.inject.Singleton; import jakarta.ws.rs.PUT; import jakarta.ws.rs.Path; @@ -35,6 +38,7 @@ @Path("arthas") @Singleton +@Tag(name = "ArthasAPI") public class ArthasAPI extends API { @Context @@ -43,6 +47,7 @@ public class ArthasAPI extends API { @PUT @Timed @Produces(APPLICATION_JSON_WITH_CHARSET) + @Operation(summary = "start arthas agent") public Object startArthas() { HugeConfig config = this.configProvider.get(); HashMap configMap = new HashMap<>(4); diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/cypher/CypherAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/cypher/CypherAPI.java index bf43d6af44..0018bcd2f2 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/cypher/CypherAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/cypher/CypherAPI.java @@ -33,6 +33,8 @@ import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.tags.Tag; + import jakarta.inject.Singleton; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.GET; @@ -47,6 +49,7 @@ 
@Path("graphs/{graph}/cypher") @Singleton +@Tag(name = "CypherAPI") public class CypherAPI extends API { private static final Logger LOG = Log.logger(CypherAPI.class); diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/AlgorithmAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/AlgorithmAPI.java index 8e0e7d10c3..8341adad8b 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/AlgorithmAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/job/AlgorithmAPI.java @@ -36,6 +36,8 @@ import com.codahale.metrics.annotation.Timed; import com.google.common.collect.ImmutableMap; +import io.swagger.v3.oas.annotations.tags.Tag; + import jakarta.inject.Singleton; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.NotFoundException; @@ -47,6 +49,7 @@ @Path("graphs/{graph}/jobs/algorithm") @Singleton +@Tag(name = "AlgorithmAPI") public class AlgorithmAPI extends API { private static final Logger LOG = Log.logger(RestServer.class); diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java index f74286b5f8..952ac90eeb 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/metrics/MetricsAPI.java @@ -70,7 +70,9 @@ import com.codahale.metrics.Metric; import com.codahale.metrics.annotation.Timed; +import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.tags.Tag; + import jakarta.annotation.security.RolesAllowed; import jakarta.inject.Singleton; import jakarta.ws.rs.GET; @@ -103,6 +105,7 @@ public MetricsAPI() { @Path("system") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get the 
system metrics") public String system() { return JsonUtil.toJson(this.systemMetrics.metrics()); } @@ -112,6 +115,7 @@ public String system() { @Path("backend") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get the backend metrics") public String backend(@Context GraphManager manager) { Map> results = InsertionOrderUtil.newMap(); for (String graph : manager.graphs()) { @@ -134,6 +138,7 @@ public String backend(@Context GraphManager manager) { @Path("gauges") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get the gauges metrics") public String gauges() { ServerReporter reporter = ServerReporter.instance(); return JsonUtil.toJson(reporter.gauges()); @@ -144,6 +149,7 @@ public String gauges() { @Path("counters") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get the counters metrics") public String counters() { ServerReporter reporter = ServerReporter.instance(); return JsonUtil.toJson(reporter.counters()); @@ -154,6 +160,7 @@ public String counters() { @Path("histograms") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get the histograms metrics") public String histograms() { ServerReporter reporter = ServerReporter.instance(); return JsonUtil.toJson(reporter.histograms()); @@ -164,6 +171,7 @@ public String histograms() { @Path("meters") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get the meters metrics") public String meters() { ServerReporter reporter = ServerReporter.instance(); return JsonUtil.toJson(reporter.meters()); @@ -174,6 +182,7 @@ public String meters() { @Path("timers") @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + 
@Operation(summary = "get the timers metrics") public String timers() { ServerReporter reporter = ServerReporter.instance(); return JsonUtil.toJson(reporter.timers()); @@ -183,6 +192,7 @@ public String timers() { @Timed @Produces(APPLICATION_TEXT_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get all base metrics") public String all(@Context GraphManager manager, @QueryParam("type") String type) { if (type != null && type.equals(JSON_STR)) { @@ -197,6 +207,7 @@ public String all(@Context GraphManager manager, @Timed @Produces(APPLICATION_TEXT_WITH_CHARSET) @RolesAllowed({"admin", "$owner= $action=metrics_read"}) + @Operation(summary = "get all statistics metrics") public String statistics(@QueryParam("type") String type) { Map> metricMap = statistics(); diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java index 7503e13822..860da55750 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/profile/WhiteIpListAPI.java @@ -37,6 +37,8 @@ import com.codahale.metrics.annotation.Timed; import com.google.common.collect.ImmutableMap; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; import jakarta.annotation.security.RolesAllowed; import jakarta.inject.Singleton; import jakarta.ws.rs.Consumes; @@ -50,6 +52,7 @@ @Path("whiteiplist") @Singleton +@Tag(name = "WhiteIpListAPI") public class WhiteIpListAPI extends API { private static final Logger LOG = Log.logger(WhiteIpListAPI.class); @@ -58,6 +61,7 @@ public class WhiteIpListAPI extends API { @Timed @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed("admin") + @Operation(summary = "list white ips") public Map list(@Context GraphManager manager) { 
LOG.debug("List white ips"); AuthManager authManager = manager.authManager(); @@ -71,6 +75,7 @@ public Map list(@Context GraphManager manager) { @Consumes(APPLICATION_JSON) @Produces(APPLICATION_JSON_WITH_CHARSET) @RolesAllowed("admin") + @Operation(summary = "update white ip list") public Map updateWhiteIPs(@Context GraphManager manager, Map actionMap) { E.checkArgument(actionMap != null, "Missing argument: actionMap"); @@ -131,6 +136,7 @@ public Map updateWhiteIPs(@Context GraphManager manager, Map updateStatus(@Context GraphManager manager, @QueryParam("status") String status) { LOG.debug("Enable or disable white ip list"); E.checkArgument("true".equals(status) || diff --git a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt index f1388437c7..d40b204333 100644 --- a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt +++ b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt @@ -32,7 +32,7 @@ chronicle-core-2.20.126.jar chronicle-queue-5.20.123.jar chronicle-threads-2.20.111.jar chronicle-wire-2.20.117.jar -classgraph-4.8.95.jar +classgraph-4.8.162.jar commons-beanutils-1.9.4.jar commons-cli-1.1.jar commons-codec-1.11.jar @@ -112,14 +112,16 @@ jackson-annotations-2.13.2.jar jackson-annotations-2.14.0-rc1.jar jackson-core-2.13.2.jar jackson-core-2.14.0-rc1.jar -jackson-databind-2.12.1.jar jackson-databind-2.13.2.2.jar jackson-databind-2.14.0-rc1.jar +jackson-databind-2.15.2.jar jackson-dataformat-yaml-2.9.3.jar -jackson-datatype-jsr310-2.12.1.jar +jackson-datatype-jsr310-2.15.2.jar +jackson-jakarta-rs-base-2.15.2.jar +jackson-jakarta-rs-json-provider-2.15.2.jar jackson-jaxrs-base-2.14.0-rc1.jar -jackson-jaxrs-json-provider-2.12.1-jakarta.jar jackson-jaxrs-json-provider-2.14.0-rc1.jar +jackson-module-jakarta-xmlbind-annotations-2.15.2.jar jackson-module-jaxb-annotations-2.14.0-rc1.jar jakarta.activation-2.0.1.jar 
jakarta.activation-api-1.2.2.jar @@ -131,7 +133,6 @@ jakarta.ws.rs-api-3.0.0.jar jakarta.xml.bind-api-4.0.0-RC2.jar jamm-0.3.2.jar java-cup-runtime-11b-20160615.jar -jcommander-1.30.jar javapoet-1.8.0.jar javassist-3.21.0-GA.jar javatuples-1.2.jar @@ -146,6 +147,7 @@ jcabi-log-0.14.jar jcabi-manifests-1.1.jar jcip-annotations-1.0-1.jar jcl-over-slf4j-1.7.25.jar +jcommander-1.30.jar jcseg-core-2.6.2.jar jctools-core-2.1.1.jar jctools-core-3.1.0.jar @@ -175,12 +177,12 @@ jna-5.12.1.jar jnr-ffi-2.1.7.jar jnr-x86asm-1.0.2.jar joda-time-2.10.8.jar -jvm-attach-api-1.5.jar jraft-core-1.3.11.jar json-simple-1.1.jar json-smart-2.3.jar jsr305-3.0.1.jar junit-4.12.jar +jvm-attach-api-1.5.jar kerb-admin-2.0.0.jar kerb-client-2.0.0.jar kerb-common-2.0.0.jar @@ -245,14 +247,15 @@ rocksdbjni-7.2.2.jar scala-java8-compat_2.12-0.8.0.jar scala-library-2.12.7.jar scala-reflect-2.12.7.jar +sigar-1.6.4.jar sjk-cli-0.14.jar sjk-core-0.14.jar sjk-json-0.14.jar sjk-stacktrace-0.14.jar -sigar-1.6.4.jar slf4j-api-1.7.25.jar snakeyaml-1.26.jar snakeyaml-1.27.jar +snakeyaml-2.2.jar snappy-java-1.1.2.6.jar snowball-stemmer-1.3.0.581.1.jar sofa-common-tools-1.0.12.jar @@ -260,18 +263,18 @@ sofa-rpc-all-5.7.6.jar sourcecode_2.12-0.1.4.jar stream-2.5.2.jar swagger-annotations-1.5.18.jar -swagger-annotations-jakarta-2.1.9.jar +swagger-annotations-jakarta-2.2.18.jar swagger-core-1.5.18.jar -swagger-core-jakarta-2.1.9.jar -swagger-integration-jakarta-2.1.9.jar -swagger-jaxrs2-jakarta-2.1.9.jar +swagger-core-jakarta-2.2.18.jar +swagger-integration-jakarta-2.2.18.jar +swagger-jaxrs2-jakarta-2.2.18.jar swagger-models-1.5.18.jar -swagger-models-jakarta-2.1.9.jar +swagger-models-jakarta-2.2.18.jar tinkergraph-gremlin-3.5.1.jar token-provider-2.0.0.jar tracer-core-3.0.8.jar translation-1.0.4.jar util-9.0-9.0.20190305.jar validation-api-1.1.0.Final.jar -zt-zip-1.14.jar zstd-jni-1.5.5-1.jar +zt-zip-1.14.jar diff --git a/hugegraph-server/pom.xml b/hugegraph-server/pom.xml index 3bb04f0faf..a112647f93 100644 
--- a/hugegraph-server/pom.xml +++ b/hugegraph-server/pom.xml @@ -115,6 +115,7 @@ 3.21.7 1.36 3.7.1 + 2.2.18 From 0e639914602cf5200988568670403ea749236b07 Mon Sep 17 00:00:00 2001 From: SunnyBoy-WYH <48077841+SunnyBoy-WYH@users.noreply.github.com> Date: Mon, 6 Nov 2023 14:34:12 +0800 Subject: [PATCH 22/24] feat(api): support recording slow query log (#2327) * chore(api): code style for cr --------- Co-authored-by: imbajin --- .../hugegraph/api/filter/AccessLogFilter.java | 46 ++++++++++++++++++- .../hugegraph/api/filter/PathFilter.java | 24 ++++++++++ .../hugegraph/config/ServerOptions.java | 9 ++++ .../hugegraph/metrics/SlowQueryLog.java | 43 +++++++++++++++++ .../src/assembly/static/conf/log4j2.xml | 27 +++++++++++ .../static/conf/rest-server.properties | 3 ++ .../src/main/resources/log4j2.xml | 28 +++++++++++ .../src/main/resources/log4j2.xml | 28 +++++++++++ 8 files changed, 206 insertions(+), 2 deletions(-) create mode 100644 hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/SlowQueryLog.java diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java index ba9c981186..3b529cf0a3 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/AccessLogFilter.java @@ -17,6 +17,7 @@ package org.apache.hugegraph.api.filter; +import static org.apache.hugegraph.api.filter.PathFilter.REQUEST_PARAMS_JSON; import static org.apache.hugegraph.api.filter.PathFilter.REQUEST_TIME; import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_FAILED_COUNTER; import static org.apache.hugegraph.metrics.MetricsUtil.METRICS_PATH_RESPONSE_TIME_HISTOGRAM; @@ -25,12 +26,20 @@ import java.io.IOException; +import org.apache.hugegraph.config.HugeConfig; +import 
org.apache.hugegraph.config.ServerOptions; import org.apache.hugegraph.metrics.MetricsUtil; +import org.apache.hugegraph.metrics.SlowQueryLog; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; import jakarta.inject.Singleton; +import jakarta.ws.rs.HttpMethod; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.ContainerResponseContext; import jakarta.ws.rs.container.ContainerResponseFilter; +import jakarta.ws.rs.core.Context; import jakarta.ws.rs.ext.Provider; @@ -39,6 +48,14 @@ public class AccessLogFilter implements ContainerResponseFilter { private static final String DELIMETER = "/"; + private static final String GRAPHS = "graphs"; + private static final String GREMLIN = "gremlin"; + private static final String CYPHER = "cypher"; + + private static final Logger LOG = Log.logger(AccessLogFilter.class); + + @Context + private jakarta.inject.Provider configProvider; /** * Use filter to log request info @@ -62,13 +79,24 @@ public void filter(ContainerRequestContext requestContext, ContainerResponseCont // get responseTime Object requestTime = requestContext.getProperty(REQUEST_TIME); - if(requestTime!=null){ + if(requestTime != null){ long now = System.currentTimeMillis(); - long responseTime = (now - (long)requestTime); + long start = (Long) requestTime; + long responseTime = now - start; MetricsUtil.registerHistogram( join(metricsName, METRICS_PATH_RESPONSE_TIME_HISTOGRAM)) .update(responseTime); + + HugeConfig config = configProvider.get(); + long timeThreshold = config.get(ServerOptions.SLOW_QUERY_LOG_TIME_THRESHOLD); + + // record slow query log + if (timeThreshold > 0 && isSlowQueryLogWhiteAPI(requestContext) && responseTime > timeThreshold) { + SlowQueryLog log = new SlowQueryLog(responseTime, start, (String) requestContext.getProperty(REQUEST_PARAMS_JSON), + method, timeThreshold, path); + LOG.info("Slow query: {}", JsonUtil.toJson(log)); + } } } @@ -79,4 +107,18 
@@ private String join(String path1, String path2) { private boolean statusOk(int status){ return status == 200 || status == 201 || status == 202; } + + public static boolean isSlowQueryLogWhiteAPI(ContainerRequestContext context) { + String path = context.getUriInfo().getPath(); + String method = context.getRequest().getMethod(); + + // GraphsAPI/CypherAPI/Job GremlinAPI + if (path.startsWith(GRAPHS)) { + if (method.equals(HttpMethod.GET) || path.endsWith(CYPHER) || path.endsWith(GREMLIN) ){ + return true; + } + } + // Raw GremlinAPI + return path.startsWith(GREMLIN); + } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java index 3414d6831b..e1e449ef26 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/api/filter/PathFilter.java @@ -17,12 +17,20 @@ package org.apache.hugegraph.api.filter; +import static org.apache.hugegraph.api.API.CHARSET; + import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.io.Charsets; +import org.apache.commons.io.IOUtils; import jakarta.inject.Singleton; +import jakarta.ws.rs.HttpMethod; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.container.ContainerRequestFilter; import jakarta.ws.rs.container.PreMatching; +import jakarta.ws.rs.core.MultivaluedMap; import jakarta.ws.rs.ext.Provider; @Provider @@ -31,10 +39,26 @@ public class PathFilter implements ContainerRequestFilter { public static final String REQUEST_TIME = "request_time"; + public static final String REQUEST_PARAMS_JSON = "request_params_json"; @Override public void filter(ContainerRequestContext context) throws IOException { context.setProperty(REQUEST_TIME, System.currentTimeMillis()); + + // record the request json + String method = 
context.getMethod(); + String requestParamsJson = ""; + if (method.equals(HttpMethod.POST)) { + requestParamsJson = IOUtils.toString(context.getEntityStream(), Charsets.toCharset(CHARSET)); + // replace input stream because we have already read it + InputStream in = IOUtils.toInputStream(requestParamsJson, Charsets.toCharset(CHARSET)); + context.setEntityStream(in); + } else if(method.equals(HttpMethod.GET)){ + MultivaluedMap pathParameters = context.getUriInfo().getPathParameters(); + requestParamsJson = pathParameters.toString(); + } + + context.setProperty(REQUEST_PARAMS_JSON, requestParamsJson); } } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java index e8b999fb56..a8bbe5a5f2 100644 --- a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/config/ServerOptions.java @@ -304,4 +304,13 @@ public static synchronized ServerOptions instance() { null, "jad" ); + + public static final ConfigOption SLOW_QUERY_LOG_TIME_THRESHOLD = + new ConfigOption<>( + "log.slow_query_threshold", + "The threshold time(ms) of logging slow query, " + + "0 means logging slow query is disabled.", + nonNegativeInt(), + 1000L + ); } diff --git a/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/SlowQueryLog.java b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/SlowQueryLog.java new file mode 100644 index 0000000000..cb3f1c7125 --- /dev/null +++ b/hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/metrics/SlowQueryLog.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package org.apache.hugegraph.metrics; + + +public class SlowQueryLog { + + public Long executeTime; + + public Long startTime; + + public String rawQuery; + + public String method; + + public Long threshold; + + public String path; + + public SlowQueryLog(Long executeTime, Long startTime, String rawQuery, String method, Long threshold, + String path) { + this.executeTime = executeTime; + this.startTime = startTime; + this.rawQuery = rawQuery; + this.method = method; + this.threshold = threshold; + this.path = path; + } +} diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/log4j2.xml b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/log4j2.xml index 985ab78b2f..db58e89112 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/log4j2.xml +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/log4j2.xml @@ -76,6 +76,30 @@ + + + + + + + + + + + + + + + + + + + + + + @@ -113,5 +137,8 @@ + + + diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties index f6444f84fb..23f78c5824 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/rest-server.properties @@ -48,3 +48,6 @@ rpc.server_port=8091 # lightweight load balancing 
(beta) server.id=server-1 server.role=master + +# slow query log +log.slow_query_threshold=1000 diff --git a/hugegraph-server/hugegraph-dist/src/main/resources/log4j2.xml b/hugegraph-server/hugegraph-dist/src/main/resources/log4j2.xml index bdd391e58b..5d80816291 100644 --- a/hugegraph-server/hugegraph-dist/src/main/resources/log4j2.xml +++ b/hugegraph-server/hugegraph-dist/src/main/resources/log4j2.xml @@ -76,6 +76,30 @@ + + + + + + + + + + + + + + + + + + + + + + @@ -124,5 +148,9 @@ + + + + diff --git a/hugegraph-server/hugegraph-test/src/main/resources/log4j2.xml b/hugegraph-server/hugegraph-test/src/main/resources/log4j2.xml index e830c6248e..284f53487c 100644 --- a/hugegraph-server/hugegraph-test/src/main/resources/log4j2.xml +++ b/hugegraph-server/hugegraph-test/src/main/resources/log4j2.xml @@ -76,6 +76,30 @@ + + + + + + + + + + + + + + + + + + + + + + @@ -124,5 +148,9 @@ + + + + From 2e623fe9eab15d2a5b48ffd73961597d41756811 Mon Sep 17 00:00:00 2001 From: V_Galaxy Date: Wed, 8 Nov 2023 18:08:08 +0800 Subject: [PATCH 23/24] refact: adjust project structure for merge PD & Store[Breaking Change] (#2338) Subtask of #2265. Adjust the project structure of this repository to include three sub-modules: hugegraph-server, hugegraph-pd, hugegraph-store at the root level. Roll back to the moment when https://github.com/apache/incubator-hugegraph/pull/2266 was merged on `pd-store` and incorporate the latest changes in `master`. For more detailed information, please refer to https://github.com/apache/incubator-hugegraph/pull/2266#issue-1834369489. 
--------- Co-authored-by: M <87920097+msgui@users.noreply.github.com> --- .licenserc.yaml | 7 +- hugegraph-pd/pom.xml | 126 ++++- .../scripts/dependency/known-dependencies.txt | 50 +- hugegraph-server/hugegraph-postgresql/pom.xml | 2 +- hugegraph-server/pom.xml | 197 +++----- hugegraph-store/pom.xml | 150 +++++- pom.xml | 450 ++++-------------- 7 files changed, 473 insertions(+), 509 deletions(-) diff --git a/.licenserc.yaml b/.licenserc.yaml index be040a8a18..70bba11e1a 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -68,7 +68,7 @@ header: # `header` section is configurations for source codes license header. - '**/*.properties' - '**/RaftRequests.java' - 'dist/**/*' - - 'hugegraph-dist' + - 'hugegraph-server/hugegraph-dist' - '**/assembly/static/bin/hugegraph.service' - 'scripts/dev/reviewers' - 'scripts/dev/reviewers' @@ -89,13 +89,10 @@ header: # `header` section is configurations for source codes license header. - '**/META-INF/MANIFEST.MF' - '.repository/**' - '**/.flattened-pom.xml' - - 'hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/SnowflakeIdGenerator.java' - '**/optimize/HugeScriptTraversal.java' - '**/type/Nameable.java' - '**/define/Cardinality.java' - '**/util/StringEncoding.java' - - 'hugegraph-api/src/main/java/org/apache/hugegraph/opencypher/CypherOpProcessor.java' - - 'hugegraph-api/src/main/java/org/apache/hugegraph/opencypher/CypherPlugin.java' # TODO: temporarily added to the ignore list, need handle them before releases ( ↓ ) - 'hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/opencypher/CypherOpProcessor.java' - 'hugegraph-server/hugegraph-api/src/main/java/org/apache/hugegraph/opencypher/CypherPlugin.java' @@ -104,7 +101,7 @@ header: # `header` section is configurations for source codes license header. 
- 'hugegraph-store/hg-store-node/src/main/java/org/apache/hugegraph/store/node/metrics/ProcfsReader.java' - 'hugegraph-store/hg-store-node/src/main/java/org/apache/hugegraph/store/node/metrics/ProcfsSmaps.java' # TODO: temporarily added to the ignore list, need handle them before releases ( ↑ ) - - 'hugegraph-dist/src/assembly/static/bin/wait-storage.sh' + - 'hugegraph-server/hugegraph-dist/src/assembly/static/bin/wait-storage.sh' comment: on-failure # on what condition license-eye will comment on the pull request, `on-failure`, `always`, `never`. # license-location-threshold specifies the index threshold where the license header can be located, diff --git a/hugegraph-pd/pom.xml b/hugegraph-pd/pom.xml index 86c1fa74db..743ead0d19 100644 --- a/hugegraph-pd/pom.xml +++ b/hugegraph-pd/pom.xml @@ -139,10 +139,134 @@ false - + + + + + + pd-client-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + pd-client-test + + test + + test + + + + + + + + pd-core-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + pd-core-test + + test + + test + + + + + + + + pd-cli-tools-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + pd-cli-tools-test + + test + + test + + + + + + + + pd-common-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + pd-common-test + + test + + test + + + + + + + + pd-service-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + pd-service-test + + test + + test + + + + + + + diff --git a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt index d40b204333..e115031d4d 100644 --- a/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt +++ b/hugegraph-server/hugegraph-dist/scripts/dependency/known-dependencies.txt @@ -1,8 +1,6 @@ -HdrHistogram-2.1.9.jar -ST4-4.0.8.jar 
accessors-smart-1.2.jar airline-0.8.jar -animal-sniffer-annotations-1.14.jar +animal-sniffer-annotations-1.19.jar annotations-4.1.1.4.jar ansj_seg-5.1.6.jar antlr-runtime-3.5.2.jar @@ -17,6 +15,7 @@ asm-tree-5.0.3.jar asm-util-5.0.3.jar ast-9.0-9.0.20190305.jar audience-annotations-0.5.0.jar +auto-service-annotations-1.0.jar bolt-1.6.4.jar byte-buddy-1.10.5.jar byte-buddy-agent-1.10.5.jar @@ -25,7 +24,8 @@ caffeine-2.3.1.jar caffeine-2.5.6.jar cassandra-all-4.0.10.jar cassandra-driver-core-3.6.0.jar -checker-qual-2.0.0.jar +checker-compat-qual-2.5.5.jar +checker-qual-3.12.0.jar checker-qual-3.5.0.jar chronicle-bytes-2.20.111.jar chronicle-core-2.20.126.jar @@ -40,6 +40,7 @@ commons-codec-1.13.jar commons-codec-1.15.jar commons-codec-1.9.jar commons-collections-3.2.2.jar +commons-collections4-4.4.jar commons-compress-1.21.jar commons-configuration-1.10.jar commons-configuration2-2.8.0.jar @@ -55,10 +56,12 @@ cypher-gremlin-extensions-1.0.4.jar disruptor-3.3.7.jar eclipse-collections-11.1.0.jar eclipse-collections-api-11.1.0.jar -error_prone_annotations-2.1.3.jar error_prone_annotations-2.10.0.jar +error_prone_annotations-2.3.4.jar exp4j-0.4.8.jar expressions-9.0-9.0.20190305.jar +failsafe-2.4.1.jar +failureaccess-1.0.1.jar fastparse_2.12-2.0.4.jar fastutil-8.5.9.jar findbugs-annotations-1.3.9-1.jar @@ -84,17 +87,33 @@ groovy-jsr223-2.5.14-indy.jar groovy-swing-2.5.14.jar groovy-templates-2.5.14.jar groovy-xml-2.5.14.jar +grpc-api-1.39.0.jar grpc-api-1.47.0.jar +grpc-context-1.39.0.jar grpc-context-1.47.0.jar +grpc-core-1.39.0.jar grpc-core-1.47.0.jar +grpc-grpclb-1.39.0.jar +grpc-netty-shaded-1.39.0.jar grpc-netty-shaded-1.47.0.jar -gson-2.9.0.jar -guava-25.1-jre.jar +grpc-protobuf-1.39.0.jar +grpc-protobuf-lite-1.39.0.jar +gson-2.8.9.jar +guava-27.0-jre.jar +guava-30.0-jre.jar +guava-31.0.1-android.jar hamcrest-2.2.jar hamcrest-core-1.3.jar hanlp-portable-1.8.3.jar hbase-shaded-endpoint-2.0.6.jar +HdrHistogram-2.1.9.jar hessian-3.3.6.jar +hg-pd-client-1.5.0.1.jar 
+hg-pd-common-1.5.0.1.jar +hg-pd-grpc-1.5.0.1.jar +hg-store-client-1.5.0.1.jar +hg-store-common-1.5.0.1.jar +hg-store-grpc-1.5.0.1.jar high-scale-lib-1.0.6.jar hk2-api-3.0.1.jar hk2-locator-3.0.1.jar @@ -106,7 +125,6 @@ httpclient-4.5.13.jar httpcore-4.4.13.jar ikanalyzer-2012_u6.jar ivy-2.4.0.jar -j2objc-annotations-1.1.jar j2objc-annotations-1.3.jar jackson-annotations-2.13.2.jar jackson-annotations-2.14.0-rc1.jar @@ -137,6 +155,7 @@ javapoet-1.8.0.jar javassist-3.21.0-GA.jar javatuples-1.2.jar javax.activation-api-1.2.0.jar +javax.annotation-api-1.3.2.jar javax.inject-1.jar javax.json-1.0.jar jaxb-api-2.3.1.jar @@ -165,8 +184,10 @@ jersey-media-json-jackson-3.0.3.jar jersey-server-3.0.3.jar jersey-test-framework-core-3.0.3.jar jersey-test-framework-provider-grizzly2-3.0.3.jar -jffi-1.2.16-native.jar +jetcd-common-0.5.9.jar +jetcd-core-0.5.9.jar jffi-1.2.16.jar +jffi-1.2.16-native.jar jflex-1.8.2.jar jieba-analysis-1.0.2.jar jjwt-api-0.11.5.jar @@ -181,7 +202,7 @@ jraft-core-1.3.11.jar json-simple-1.1.jar json-smart-2.3.jar jsr305-3.0.1.jar -junit-4.12.jar +junit-4.13.1.jar jvm-attach-api-1.5.jar kerb-admin-2.0.0.jar kerb-client-2.0.0.jar @@ -197,9 +218,11 @@ kerby-config-2.0.0.jar kerby-pkix-2.0.0.jar kerby-util-2.0.0.jar kerby-xdr-2.0.0.jar +listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar log4j-api-2.17.1.jar log4j-core-2.17.1.jar log4j-slf4j-impl-2.17.1.jar +lombok-1.18.20.jar lookout-api-1.4.1.jar lucene-analyzers-common-8.11.2.jar lucene-analyzers-smartcn-8.11.2.jar @@ -237,11 +260,13 @@ parboiled-scala_2.12-1.2.0.jar parser-9.0-9.0.20190305.jar perfmark-api-0.25.0.jar picocli-4.3.2.jar -postgresql-42.4.1.jar +postgresql-42.4.3.jar protobuf-java-3.21.7.jar +protobuf-java-util-3.17.2.jar +proto-google-common-protos-2.0.1.jar psjava-0.1.19.jar -reporter-config-base-3.0.3.jar reporter-config3-3.0.3.jar +reporter-config-base-3.0.3.jar rewriting-9.0-9.0.20190305.jar rocksdbjni-7.2.2.jar scala-java8-compat_2.12-0.8.0.jar @@ -261,6 +286,7 @@ 
snowball-stemmer-1.3.0.581.1.jar sofa-common-tools-1.0.12.jar sofa-rpc-all-5.7.6.jar sourcecode_2.12-0.1.4.jar +ST4-4.0.8.jar stream-2.5.2.jar swagger-annotations-1.5.18.jar swagger-annotations-jakarta-2.2.18.jar diff --git a/hugegraph-server/hugegraph-postgresql/pom.xml b/hugegraph-server/hugegraph-postgresql/pom.xml index aa6068b1a2..57c9ad840f 100644 --- a/hugegraph-server/hugegraph-postgresql/pom.xml +++ b/hugegraph-server/hugegraph-postgresql/pom.xml @@ -42,7 +42,7 @@ org.postgresql postgresql - 42.4.1 + 42.4.3 diff --git a/hugegraph-server/pom.xml b/hugegraph-server/pom.xml index a112647f93..4e33703f57 100644 --- a/hugegraph-server/pom.xml +++ b/hugegraph-server/pom.xml @@ -35,59 +35,6 @@ ../pom.xml - 2017 - - - The Apache Software License, Version 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - - - Apache Hugegraph(incubating) - dev-subscribe@hugegraph.apache.org - https://hugegraph.apache.org/ - - - - - - Development Mailing List - dev-subscribe@hugegraph.apache.org - dev-unsubscribe@hugegraph.apache.org - dev@hugegraph.incubator.apache.org - - - Commits List - commits-subscribe@hugegraph.apache.org - commits-unsubscribe@hugegraph.apache.org - commits@hugegraph.apache.org - - - Issues List - issues-subscribe@hugegraph.apache.org - issues-unsubscribe@hugegraph.apache.org - issues@hugegraph.apache.org - - - - - Github Issues - https://github.com/apache/hugegraph/issues - - - - https://github.com/apache/hugegraph - scm:git:https://github.com/apache/hugegraph.git - scm:git:https://github.com/apache/hugegraph.git - - - - 3.5.0 - - UTF-8 ${project.basedir}/.. 
@@ -99,7 +46,7 @@ 1.7.5 1.2.17 2.17.1 - 4.12 + 4.13.1 3.5.1 2.7 25.1-jre @@ -238,11 +185,6 @@ commons-io ${commons.io.version} - - com.google.guava - guava - ${guava.version} - org.apache.httpcomponents httpclient @@ -300,16 +242,13 @@ io.grpc grpc-netty ${grpc.version} - - - io.grpc - grpc-protobuf - ${grpc.version} + provided io.grpc grpc-stub ${grpc.version} + provided com.google.protobuf @@ -340,38 +279,6 @@ - - - org.codehaus.mojo - license-maven-plugin - 1.19 - - - default-cli - - - true - - /org/codehaus/mojo/license/third-party-file-groupByMultiLicense.ftl - - The Apache Software License, Version 2.0|The Apache License, Version 2.0 - The Apache Software License, Version 2.0|Apache License, Version 2.0 - The Apache Software License, Version 2.0|Apache Public License 2.0 - The Apache Software License, Version 2.0|Apache 2 - The Apache Software License, Version 2.0|Apache 2.0 - The Apache Software License, Version 2.0|Apache-2.0 - The Apache Software License, Version 2.0|Apache License 2.0 - The Apache Software License, Version 2.0|Apache License, version 2.0 - 3-Clause BSD License|BSD 3-clause - 3-Clause BSD License|BSD 3-Clause - Eclipse Public License v1.0|Eclipse Public License 1.0 - Eclipse Public License v1.0|Eclipse Public License - v 1.0 - The MIT License|MIT License - - - - - maven-compiler-plugin 3.1 @@ -467,65 +374,109 @@ - + - apache-release + core-test + + true + org.apache.maven.plugins - maven-source-plugin + maven-surefire-plugin - attach-sources + core-test - jar-no-fork + test + test + + + + + unit-test + + org.apache.maven.plugins - maven-javadoc-plugin + maven-surefire-plugin - attach-javadocs + unit-test - jar + test - - none - false - + test - - none - false - + + + + + api-test + + + + org.apache.maven.plugins + maven-surefire-plugin + + + api-test + + test + + test + + + + + + + + tinkerpop-structure-test + + + + org.apache.maven.plugins + maven-surefire-plugin + + + tinkerpop-structure-test + + test + + test + + + + + + + + 
tinkerpop-process-test + + org.apache.maven.plugins - maven-gpg-plugin + maven-surefire-plugin - sign-artifacts - verify + tinkerpop-process-test - sign + test + test - - - - --pinentry-mode - loopback - - diff --git a/hugegraph-store/pom.xml b/hugegraph-store/pom.xml index c2b13f6667..bc6154ad12 100644 --- a/hugegraph-store/pom.xml +++ b/hugegraph-store/pom.xml @@ -157,10 +157,158 @@ false - + + + + + + store-client-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + store-client-test + + test + + test + + + + + + + + store-core-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + store-core-test + + test + + test + + + + + + + + store-common-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + store-common-test + + test + + test + + + + + + + + store-rocksdb-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + store-rocksdb-test + + test + + test + + + + + + + + store-server-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + store-server-test + + test + + test + + + + + + + + store-raftcore-test + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20 + + + store-raftcore-test + + test + + test + + + + + + + diff --git a/pom.xml b/pom.xml index aacd037edf..207b9f68d4 100644 --- a/pom.xml +++ b/pom.xml @@ -45,37 +45,37 @@ - - Apache Hugegraph(incubating) - dev-subscribe@hugegraph.apache.org - https://hugegraph.apache.org/ - + + Apache Hugegraph(incubating) + dev-subscribe@hugegraph.apache.org + https://hugegraph.apache.org/ + - - Development Mailing List - dev-subscribe@hugegraph.apache.org - dev-unsubscribe@hugegraph.apache.org - dev@hugegraph.incubator.apache.org - - - Commits List - commits-subscribe@hugegraph.apache.org - commits-unsubscribe@hugegraph.apache.org - commits@hugegraph.apache.org - - - Issues List - issues-subscribe@hugegraph.apache.org - 
issues-unsubscribe@hugegraph.apache.org - issues@hugegraph.apache.org - + + Development Mailing List + dev-subscribe@hugegraph.apache.org + dev-unsubscribe@hugegraph.apache.org + dev@hugegraph.incubator.apache.org + + + Commits List + commits-subscribe@hugegraph.apache.org + commits-unsubscribe@hugegraph.apache.org + commits@hugegraph.apache.org + + + Issues List + issues-subscribe@hugegraph.apache.org + issues-unsubscribe@hugegraph.apache.org + issues@hugegraph.apache.org + - Github Issues - https://github.com/apache/hugegraph/issues + Github Issues + https://github.com/apache/hugegraph/issues @@ -101,7 +101,39 @@ - + + + org.codehaus.mojo + license-maven-plugin + 1.19 + + + default-cli + + + true + + /org/codehaus/mojo/license/third-party-file-groupByMultiLicense.ftl + + The Apache Software License, Version 2.0|The Apache License, Version 2.0 + The Apache Software License, Version 2.0|Apache License, Version 2.0 + The Apache Software License, Version 2.0|Apache Public License 2.0 + The Apache Software License, Version 2.0|Apache 2 + The Apache Software License, Version 2.0|Apache 2.0 + The Apache Software License, Version 2.0|Apache-2.0 + The Apache Software License, Version 2.0|Apache License 2.0 + The Apache Software License, Version 2.0|Apache License, version 2.0 + 3-Clause BSD License|BSD 3-clause + 3-Clause BSD License|BSD 3-Clause + Eclipse Public License v1.0|Eclipse Public License 1.0 + Eclipse Public License v1.0|Eclipse Public License - v 1.0 + The MIT License|MIT License + + + + + + org.apache.rat apache-rat-plugin @@ -202,375 +234,61 @@ hugegraph-pd - - - core-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - core-test - - test - - test - - - - - - - - - - - unit-test - - - - org.apache.maven.plugins - maven-surefire-plugin - - - unit-test - - test - - test - - - - - - - - api-test - - - - org.apache.maven.plugins - maven-surefire-plugin - - - api-test - - test - - test - - - - - - - - tinkerpop-structure-test - - - - 
org.apache.maven.plugins - maven-surefire-plugin - - - tinkerpop-structure-test - - test - - test - - - - - - - - tinkerpop-process-test - - - - org.apache.maven.plugins - maven-surefire-plugin - - - tinkerpop-process-test - - test - - test - - - - - - - - - pd-client-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - pd-client-test - - test - - test - - - - - - - pd-core-test - - true - + apache-release org.apache.maven.plugins - maven-surefire-plugin - 2.20 + maven-source-plugin - pd-core-test + attach-sources - test + jar-no-fork - test - - - - - pd-cli-tools-test - - true - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 + maven-javadoc-plugin - pd-cli-tools-test + attach-javadocs - test + jar - test + + none + false + + + none + false + - - - - - pd-common-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - pd-common-test - - test - - test - - - - - - - - pd-service-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - pd-service-test - - test - - test - - - - - - - - - store-client-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - store-client-test - - test - - test - - - - - - - - store-core-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - store-core-test - - test - - test - - - - - - - - store-common-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - store-common-test - - test - - test - - - - - - - - store-rocksdb-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - store-rocksdb-test - - test - - test - - - - - - - - store-server-test - - true - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 - - - store-server-test - - test - - test - - - - - - - - store-raftcore-test - - true - - - org.apache.maven.plugins - maven-surefire-plugin - 2.20 + maven-gpg-plugin - store-raftcore-test + 
sign-artifacts + verify - test + sign - test + + + + --pinentry-mode + loopback + + From 933cfdc9e483e5f281bfd168fb30989a9e444d2f Mon Sep 17 00:00:00 2001 From: VGalaxies Date: Fri, 10 Nov 2023 16:53:51 +0800 Subject: [PATCH 24/24] fix: TaskScheduler.delete test caller sets the force parameter to false --- .../main/java/org/apache/hugegraph/core/TaskCoreTest.java | 6 +++--- .../main/java/org/apache/hugegraph/tinkerpop/TestGraph.java | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java index 27cd880fae..9795353136 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/core/TaskCoreTest.java @@ -56,7 +56,7 @@ public void setup() { Iterator> iter = scheduler.tasks(null, -1, null); while (iter.hasNext()) { - scheduler.delete(iter.next().id(), true); + scheduler.delete(iter.next().id(), false); } } @@ -77,7 +77,7 @@ public void testTask() throws TimeoutException { Assert.assertFalse(task.completed()); Assert.assertThrows(IllegalArgumentException.class, () -> { - scheduler.delete(id, true); + scheduler.delete(id, false); }, e -> { Assert.assertContains("Can't delete incomplete task '88888'", e.getMessage()); @@ -107,7 +107,7 @@ public void testTask() throws TimeoutException { Assert.assertEquals("test-task", iter.next().name()); Assert.assertFalse(iter.hasNext()); - scheduler.delete(id, true); + scheduler.delete(id, false); iter = scheduler.tasks(null, 10, null); Assert.assertFalse(iter.hasNext()); Assert.assertThrows(NotFoundException.class, () -> { diff --git a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java index 
1dfa85fb9a..6aa01dec7e 100644 --- a/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java +++ b/hugegraph-server/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/TestGraph.java @@ -148,7 +148,7 @@ protected void clearSchema() { TaskScheduler scheduler = this.graph.taskScheduler(); scheduler.tasks(null, -1, null).forEachRemaining(elem -> { - scheduler.delete(elem.id(), true); + scheduler.delete(elem.id(), false); }); }