diff --git a/hugegraph-pd/hg-pd-client/src/main/java/org/apache/hugegraph/pd/client/PDClient.java b/hugegraph-pd/hg-pd-client/src/main/java/org/apache/hugegraph/pd/client/PDClient.java index ae57e622cb..6c86f96376 100644 --- a/hugegraph-pd/hg-pd-client/src/main/java/org/apache/hugegraph/pd/client/PDClient.java +++ b/hugegraph-pd/hg-pd-client/src/main/java/org/apache/hugegraph/pd/client/PDClient.java @@ -54,8 +54,6 @@ /** * PD客户端实现类 - * - * @author yanjinbing */ @Slf4j public class PDClient { diff --git a/hugegraph-pd/hg-pd-service/src/main/java/org/apache/hugegraph/pd/rest/API.java b/hugegraph-pd/hg-pd-service/src/main/java/org/apache/hugegraph/pd/rest/API.java index a4b83333ed..dca1d58d02 100644 --- a/hugegraph-pd/hg-pd-service/src/main/java/org/apache/hugegraph/pd/rest/API.java +++ b/hugegraph-pd/hg-pd-service/src/main/java/org/apache/hugegraph/pd/rest/API.java @@ -136,11 +136,6 @@ public String toJSON(Exception exception) { return builder; } - /** - * @param object - * @return - * @author tianxiaohui - */ public String toJSON(Object object) { ObjectMapper mapper = new ObjectMapper(); try { diff --git a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java index b4b1bf4351..5cce218d3b 100644 --- a/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java +++ b/hugegraph-server/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraStore.java @@ -594,7 +594,7 @@ protected Collection tables() { @Override protected final CassandraTable table(HugeType type) { - return this.table(type.string()); + return this.table(convertTaskOrServerToVertex(type).string()); } protected final CassandraTable table(String name) { diff --git a/hugegraph-server/hugegraph-core/pom.xml 
b/hugegraph-server/hugegraph-core/pom.xml index de312c9378..2a3168b289 100644 --- a/hugegraph-server/hugegraph-core/pom.xml +++ b/hugegraph-server/hugegraph-core/pom.xml @@ -236,6 +236,32 @@ ${jjwt.version} runtime + + com.google.code.gson + gson + 2.8.9 + + + org.apache.hugegraph + hg-pd-client + ${revision} + + + org.apache.hugegraph + hg-store-common + ${revision} + + + io.etcd + jetcd-core + 0.5.9 + + + io.grpc + grpc-core + + + diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java index e655b7c02e..dd611ae68c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/HugeGraphParams.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.backend.store.BackendStore; import org.apache.hugegraph.backend.store.ram.RamTable; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.job.EphemeralJob; import org.apache.hugegraph.task.ServerInfoManager; @@ -46,7 +47,7 @@ public interface HugeGraphParams { GraphReadMode readMode(); - SchemaTransaction schemaTransaction(); + ISchemaTransaction schemaTransaction(); GraphTransaction systemTransaction(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java index db37d0a4bd..c2ea1ea43d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/StandardHugeGraph.java @@ -18,6 +18,7 @@ package org.apache.hugegraph; import java.util.Collection; +import java.util.Collections; import 
java.util.Iterator; import java.util.List; import java.util.Set; @@ -37,6 +38,7 @@ import org.apache.hugegraph.backend.cache.CacheNotifier.SchemaCacheNotifier; import org.apache.hugegraph.backend.cache.CachedGraphTransaction; import org.apache.hugegraph.backend.cache.CachedSchemaTransaction; +import org.apache.hugegraph.backend.cache.CachedSchemaTransactionV2; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.backend.id.SnowflakeIdGenerator; @@ -51,9 +53,8 @@ import org.apache.hugegraph.backend.store.raft.RaftBackendStoreProvider; import org.apache.hugegraph.backend.store.raft.RaftGroupManager; import org.apache.hugegraph.backend.store.ram.RamTable; -import org.apache.hugegraph.task.EphemeralJobQueue; import org.apache.hugegraph.backend.tx.GraphTransaction; -import org.apache.hugegraph.backend.tx.SchemaTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.config.CoreOptions; import org.apache.hugegraph.config.HugeConfig; import org.apache.hugegraph.config.TypedOption; @@ -69,6 +70,7 @@ import org.apache.hugegraph.masterelection.RoleElectionStateMachine; import org.apache.hugegraph.masterelection.StandardClusterRoleStore; import org.apache.hugegraph.masterelection.StandardRoleElectionStateMachine; +import org.apache.hugegraph.meta.MetaManager; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.rpc.RpcServiceConfig4Client; import org.apache.hugegraph.rpc.RpcServiceConfig4Server; @@ -84,6 +86,7 @@ import org.apache.hugegraph.structure.HugeFeatures; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; +import org.apache.hugegraph.task.EphemeralJobQueue; import org.apache.hugegraph.task.ServerInfoManager; import org.apache.hugegraph.task.TaskManager; import org.apache.hugegraph.task.TaskScheduler; @@ -176,6 +179,8 @@ public class StandardHugeGraph implements HugeGraph { 
private final RamTable ramtable; + private final MetaManager metaManager = MetaManager.instance(); + public StandardHugeGraph(HugeConfig config) { this.params = new StandardHugeGraphParams(); this.configuration = config; @@ -221,6 +226,10 @@ public StandardHugeGraph(HugeConfig config) { throw new HugeException(message, e); } + if (isHstore()) { + initMetaManager(); + } + try { this.tx = new TinkerPopTransaction(this); boolean supportsPersistence = this.backendStoreFeatures().supportsPersistence(); @@ -453,9 +462,24 @@ private void clearVertexCache() { } } - private SchemaTransaction openSchemaTransaction() throws HugeException { + private boolean isHstore() { + return this.storeProvider.isHstore(); + } + + private void initMetaManager() { + this.metaManager.connect("hg", MetaManager.MetaDriverType.PD, + "ca", "ca", "ca", + Collections.singletonList("127.0.0.1:8686")); + } + + private ISchemaTransaction openSchemaTransaction() throws HugeException { this.checkGraphNotClosed(); try { + if (isHstore()) { + return new CachedSchemaTransactionV2( + MetaManager.instance().metaDriver(), + MetaManager.instance().cluster(), this.params); + } return new CachedSchemaTransaction(this.params, loadSchemaStore()); } catch (BackendException e) { String message = "Failed to open schema transaction"; @@ -500,11 +524,14 @@ private BackendStore loadGraphStore() { } private BackendStore loadSystemStore() { + if (isHstore()) { + return this.storeProvider.loadGraphStore(this.configuration); + } return this.storeProvider.loadSystemStore(this.configuration); } @Watched - private SchemaTransaction schemaTransaction() { + private ISchemaTransaction schemaTransaction() { this.checkGraphNotClosed(); /* * NOTE: each schema operation will be auto committed, @@ -1192,7 +1219,7 @@ public GraphReadMode readMode() { } @Override - public SchemaTransaction schemaTransaction() { + public ISchemaTransaction schemaTransaction() { return StandardHugeGraph.this.schemaTransaction(); } @@ -1443,7 +1470,7 @@ 
private void setClosed() { } } - private SchemaTransaction schemaTransaction() { + private ISchemaTransaction schemaTransaction() { return this.getOrNewTransaction().schemaTx; } @@ -1464,7 +1491,7 @@ private Txs getOrNewTransaction() { Txs txs = this.transactions.get(); if (txs == null) { - SchemaTransaction schemaTransaction = null; + ISchemaTransaction schemaTransaction = null; SysTransaction sysTransaction = null; GraphTransaction graphTransaction = null; try { @@ -1507,12 +1534,12 @@ private void destroyTransaction() { private static final class Txs { - private final SchemaTransaction schemaTx; + private final ISchemaTransaction schemaTx; private final SysTransaction systemTx; private final GraphTransaction graphTx; private long openedTime; - public Txs(SchemaTransaction schemaTx, SysTransaction systemTx, + public Txs(ISchemaTransaction schemaTx, SysTransaction systemTx, GraphTransaction graphTx) { assert schemaTx != null && systemTx != null && graphTx != null; this.schemaTx = schemaTx; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java index d17131a29c..3749a314be 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeAccess.java @@ -229,4 +229,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeAccess fromMap(Map map) { + HugeAccess access = new HugeAccess(null, null, null); + return fromMap(map, access); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java index f91c26af0b..ffd5d3bc5f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeBelong.java @@ -33,10 +33,15 @@ public class HugeBelong extends Relationship { + public static final String UG = "ug"; + public static final String UR = "ur"; + public static final String GR = "gr"; + public static final String ALL = "*"; private static final long serialVersionUID = -7242751631755533423L; private final Id user; private final Id group; + private String link; private String description; public HugeBelong(Id user, Id group) { @@ -75,6 +80,10 @@ public Id target() { return this.group; } + public String link() { + return this.link; + } + public String description() { return this.description; } @@ -196,4 +205,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeBelong fromMap(Map map) { + HugeBelong belong = new HugeBelong(null, null); + return fromMap(map, belong); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java index 1bced91d9f..acc7973043 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeGroup.java @@ -37,6 +37,7 @@ public class HugeGroup extends Entity { private static final long serialVersionUID = 2330399818352242686L; private String name; + private String nickname; private String description; public HugeGroup(String name) { @@ -68,6 +69,14 @@ public String name() { return this.name; } + public String nickname() { + return this.nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + public String description() { return this.description; } @@ -195,4 +204,9 @@ protected String[] initProperties() { return super.initProperties(props); } } + + public static HugeGroup fromMap(Map map) { + HugeGroup group = new 
HugeGroup(""); + return fromMap(map, group); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java new file mode 100644 index 0000000000..ee5955bbaf --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeRole.java @@ -0,0 +1,240 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.auth; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.auth.SchemaDefine.Entity; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Graph.Hidden; +import org.apache.tinkerpop.gremlin.structure.T; + +public class HugeRole extends Entity { + + private static final long serialVersionUID = 2330399818352242686L; + + private String name; + private String nickname; + private String graphSpace; + private String description; + + public HugeRole(Id id, String name, String graphSpace) { + this.id = id; + this.name = name; + this.graphSpace = graphSpace; + this.description = null; + } + + public HugeRole(String name, String graphSpace) { + this(StringUtils.isNotEmpty(name) ? 
IdGenerator.of(name) : null, + name, graphSpace); + } + + public HugeRole(Id id, String graphSpace) { + this(id, id.asString(), graphSpace); + } + + public static HugeRole fromMap(Map map) { + HugeRole role = new HugeRole("", ""); + return fromMap(map, role); + } + + public static Schema schema(HugeGraphParams graph) { + return new Schema(graph); + } + + @Override + public ResourceType type() { + return ResourceType.GRANT; + } + + @Override + public String label() { + return P.ROLE; + } + + @Override + public String name() { + return this.name; + } + + public void name(String name) { + this.name = name; + } + + public String nickname() { + return this.nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + + public String graphSpace() { + return this.graphSpace; + } + + public String description() { + return this.description; + } + + public void description(String description) { + this.description = description; + } + + @Override + public String toString() { + return String.format("HugeGroup(%s)", this.id); + } + + @Override + protected boolean property(String key, Object value) { + if (super.property(key, value)) { + return true; + } + switch (key) { + case P.GRAPHSPACE: + this.graphSpace = (String) value; + break; + case P.NAME: + this.name = (String) value; + break; + case P.NICKNAME: + this.nickname = (String) value; + break; + case P.DESCRIPTION: + this.description = (String) value; + break; + default: + throw new AssertionError("Unsupported key: " + key); + } + return true; + } + + @Override + protected Object[] asArray() { + E.checkState(this.name != null, "Group name can't be null"); + + List list = new ArrayList<>(12); + + list.add(T.label); + list.add(P.ROLE); + + list.add(P.GRAPHSPACE); + list.add(this.graphSpace); + + list.add(P.NAME); + list.add(this.name); + + if (this.nickname != null) { + list.add(P.NICKNAME); + list.add(this.nickname); + } + + if (this.description != null) { + list.add(P.DESCRIPTION); + 
list.add(this.description); + } + + return super.asArray(list); + } + + @Override + public Map asMap() { + E.checkState(this.name != null, "Group name can't be null"); + + Map map = new HashMap<>(); + + map.put(Hidden.unHide(P.NAME), this.name); + map.put(Hidden.unHide(P.GRAPHSPACE), this.graphSpace); + if (this.description != null) { + map.put(Hidden.unHide(P.DESCRIPTION), this.description); + } + + if (this.nickname != null) { + map.put(Hidden.unHide(P.NICKNAME), this.nickname); + } + + return super.asMap(map); + } + + public static final class P { + + public static final String ROLE = Hidden.hide("role"); + + public static final String ID = T.id.getAccessor(); + public static final String LABEL = T.label.getAccessor(); + + public static final String NAME = "~role_name"; + public static final String NICKNAME = "~role_nickname"; + public static final String GRAPHSPACE = "~graphspace"; + public static final String DESCRIPTION = "~role_description"; + + public static String unhide(String key) { + final String prefix = Hidden.hide("role_"); + if (key.startsWith(prefix)) { + return key.substring(prefix.length()); + } + return key; + } + } + + public static final class Schema extends SchemaDefine { + + public Schema(HugeGraphParams graph) { + super(graph, P.ROLE); + } + + @Override + public void initSchemaIfNeeded() { + if (this.existVertexLabel(this.label)) { + return; + } + + String[] properties = this.initProperties(); + + // Create vertex label + VertexLabel label = this.schema().vertexLabel(this.label) + .properties(properties) + .usePrimaryKeyId() + .primaryKeys(P.NAME) + .nullableKeys(P.DESCRIPTION, P.NICKNAME) + .enableLabelIndex(true) + .build(); + this.graph.schemaTransaction().addVertexLabel(label); + } + + protected String[] initProperties() { + List props = new ArrayList<>(); + + props.add(createPropertyKey(P.NAME)); + props.add(createPropertyKey(P.DESCRIPTION)); + props.add(createPropertyKey(P.NICKNAME)); + + return super.initProperties(props); + } + } +} 
diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java index ed4207565c..f4f9c1f050 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeTarget.java @@ -42,6 +42,7 @@ public class HugeTarget extends Entity { private String name; private String graph; + private String description; private String url; private List resources; @@ -92,6 +93,18 @@ public String graph() { return this.graph; } + public void graph(String graph) { + this.graph = graph; + } + + public String description() { + return this.description; + } + + public void description(String description) { + this.description = description; + } + public String url() { return this.url; } @@ -258,4 +271,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeTarget fromMap(Map map) { + HugeTarget target = new HugeTarget(null); + return fromMap(map, target); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java index cc386fe57d..510e837552 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/HugeUser.java @@ -37,6 +37,7 @@ public class HugeUser extends Entity { private static final long serialVersionUID = -8951193710873772717L; private String name; + private String nickname; private String password; private String phone; private String email; @@ -74,6 +75,14 @@ public String name() { return this.name; } + public String nickname() { + return nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + public String 
password() { return this.password; } @@ -281,4 +290,9 @@ private String[] initProperties() { return super.initProperties(props); } } + + public static HugeUser fromMap(Map map) { + HugeUser user = new HugeUser(""); + return fromMap(map, user); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java index 80020638b7..ff4c5fa2ed 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/auth/SchemaDefine.java @@ -25,6 +25,7 @@ import org.apache.hugegraph.auth.HugeTarget.P; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.SchemaManager; @@ -245,6 +246,14 @@ public abstract static class Entity extends AuthElement private static final long serialVersionUID = 4113319546914811762L; + public static T fromMap(Map map, T entity) { + for (Map.Entry item : map.entrySet()) { + entity.property(Hidden.hide(item.getKey()), item.getValue()); + } + entity.id(IdGenerator.of(entity.name())); + return entity; + } + public static T fromVertex(Vertex vertex, T entity) { E.checkArgument(vertex.label().equals(entity.label()), "Illegal vertex label '%s' for entity '%s'", @@ -281,6 +290,19 @@ public abstract static class Relationship extends AuthElement { public abstract Id target(); + public void setId() { + this.id(IdGenerator.of(this.source().asString() + "->" + + this.target().asString())); + } + + public static T fromMap(Map map, T entity) { + for (Map.Entry item : map.entrySet()) { + entity.property(Hidden.hide(item.getKey()), item.getValue()); + } + entity.setId(); + return entity; + } + public static T fromEdge(Edge edge, T relationship) { 
E.checkArgument(edge.label().equals(relationship.label()), diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java new file mode 100644 index 0000000000..6e5d6dca1f --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/cache/CachedSchemaTransactionV2.java @@ -0,0 +1,469 @@ +package org.apache.hugegraph.backend.cache; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; + +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.store.ram.IntObjectMap; +import org.apache.hugegraph.backend.tx.SchemaTransactionV2; +import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.event.EventHub; +import org.apache.hugegraph.event.EventListener; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.perf.PerfUtil; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Events; + +import com.google.common.collect.ImmutableSet; + +public class CachedSchemaTransactionV2 extends SchemaTransactionV2 { + private final Cache idCache; + private final Cache nameCache; + + private final SchemaCaches arrayCaches; + + private EventListener storeEventListener; + private EventListener cacheEventListener; + + public CachedSchemaTransactionV2(MetaDriver metaDriver, + String cluster, + HugeGraphParams graphParams) { + super(metaDriver, cluster, graphParams); + + final long capacity = graphParams.configuration() + .get(CoreOptions.SCHEMA_CACHE_CAPACITY); 
+ this.idCache = this.cache("schema-id", capacity); + this.nameCache = this.cache("schema-name", capacity); + + SchemaCaches attachment = this.idCache.attachment(); + if (attachment == null) { + int acSize = (int) (capacity >> 3); + attachment = this.idCache.attachment(new SchemaCaches<>(acSize)); + } + this.arrayCaches = attachment; + this.listenChanges(); + } + + private static Id generateId(HugeType type, Id id) { + // NOTE: it's slower performance to use: + // String.format("%x-%s", type.code(), name) + return IdGenerator.of(type.string() + "-" + id.asString()); + } + + private static Id generateId(HugeType type, String name) { + return IdGenerator.of(type.string() + "-" + name); + } + + public void close() { + this.clearCache(false); + this.unlistenChanges(); + } + + private Cache cache(String prefix, long capacity) { + // TODO: uncomment later - graph space + //final String name = prefix + "-" + this.graph().spaceGraphName(); + final String name = prefix + "-" + ""; + // NOTE: must disable schema cache-expire due to getAllSchema() + return CacheManager.instance().cache(name, capacity); + } + + private void listenChanges() { + // Listen store event: "store.init", "store.clear", ... 
+ Set storeEvents = ImmutableSet.of(Events.STORE_INIT, + Events.STORE_CLEAR, + Events.STORE_TRUNCATE); + this.storeEventListener = event -> { + if (storeEvents.contains(event.name())) { + LOG.debug("Graph {} clear schema cache on event '{}'", + this.graph(), event.name()); + this.clearCache(true); + return true; + } + return false; + }; + this.graphParams().loadGraphStore().provider().listen(this.storeEventListener); + + // Listen cache event: "cache"(invalid cache item) + this.cacheEventListener = event -> { + LOG.debug("Graph {} received schema cache event: {}", + this.graph(), event); + Object[] args = event.args(); + E.checkArgument(args.length > 0 && args[0] instanceof String, + "Expect event action argument"); + if (Cache.ACTION_INVALID.equals(args[0])) { + event.checkArgs(String.class, HugeType.class, Id.class); + HugeType type = (HugeType) args[1]; + Id id = (Id) args[2]; + this.arrayCaches.remove(type, id); + + id = generateId(type, id); + Object value = this.idCache.get(id); + if (value != null) { + // Invalidate id cache + this.idCache.invalidate(id); + + // Invalidate name cache + SchemaElement schema = (SchemaElement) value; + Id prefixedName = generateId(schema.type(), + schema.name()); + this.nameCache.invalidate(prefixedName); + } + this.resetCachedAll(type); + return true; + } else if (Cache.ACTION_CLEAR.equals(args[0])) { + event.checkArgs(String.class, HugeType.class); + this.clearCache(false); + return true; + } + return false; + }; + EventHub schemaEventHub = this.graphParams().schemaEventHub(); + if (!schemaEventHub.containsListener(Events.CACHE)) { + schemaEventHub.listen(Events.CACHE, this.cacheEventListener); + } + } + + public void clearCache(boolean notify) { + this.idCache.clear(); + this.nameCache.clear(); + this.arrayCaches.clear(); + } + + private void resetCachedAllIfReachedCapacity() { + if (this.idCache.size() >= this.idCache.capacity()) { + LOG.warn("Schema cache reached capacity({}): {}", + this.idCache.capacity(), 
this.idCache.size()); + this.cachedTypes().clear(); + } + } + + private void unlistenChanges() { + // Unlisten store event + this.graphParams().loadGraphStore().provider() + .unlisten(this.storeEventListener); + + // Unlisten cache event + EventHub schemaEventHub = this.graphParams().schemaEventHub(); + schemaEventHub.unlisten(Events.CACHE, this.cacheEventListener); + } + + private CachedTypes cachedTypes() { + return this.arrayCaches.cachedTypes(); + } + + private void resetCachedAll(HugeType type) { + // Set the cache all flag of the schema type to false + this.cachedTypes().put(type, false); + } + + private void invalidateCache(HugeType type, Id id) { + // remove from id cache and name cache + Id prefixedId = generateId(type, id); + Object value = this.idCache.get(prefixedId); + if (value != null) { + this.idCache.invalidate(prefixedId); + + SchemaElement schema = (SchemaElement) value; + Id prefixedName = generateId(schema.type(), schema.name()); + this.nameCache.invalidate(prefixedName); + } + + // remove from optimized array cache + this.arrayCaches.remove(type, id); + } + + @Override + protected void updateSchema(SchemaElement schema, + Consumer updateCallback) { + super.updateSchema(schema, updateCallback); + + this.updateCache(schema); + } + + @Override + protected void addSchema(SchemaElement schema) { + super.addSchema(schema); + + this.updateCache(schema); + + if (!this.graph().option(CoreOptions.TASK_SYNC_DELETION)) { + MetaManager.instance() + // TODO: uncomment later - graph space + //.notifySchemaCacheClear(this.graph().graphSpace(), + // this.graph().name()); + .notifySchemaCacheClear("", + this.graph().name()); + } + } + + private void updateCache(SchemaElement schema) { + this.resetCachedAllIfReachedCapacity(); + + // update id cache + Id prefixedId = generateId(schema.type(), schema.id()); + this.idCache.update(prefixedId, schema); + + // update name cache + Id prefixedName = generateId(schema.type(), schema.name()); + 
this.nameCache.update(prefixedName, schema); + + // update optimized array cache + this.arrayCaches.updateIfNeeded(schema); + } + + @Override + protected void removeSchema(SchemaElement schema) { + super.removeSchema(schema); + + this.invalidateCache(schema.type(), schema.id()); + + if (!this.graph().option(CoreOptions.TASK_SYNC_DELETION)) { + MetaManager.instance() + // TODO: uncomment later - graph space + //.notifySchemaCacheClear(this.graph().graphSpace(), + // this.graph().name()); + .notifySchemaCacheClear("", + this.graph().name()); + } + } + + @Override + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, Id id) { + // try get from optimized array cache + if (id.number() && id.asLong() > 0L) { + SchemaElement value = this.arrayCaches.get(type, id); + if (value != null) { + return (T) value; + } + } + + Id prefixedId = generateId(type, id); + Object value = this.idCache.get(prefixedId); + if (value == null) { + value = super.getSchema(type, id); + if (value != null) { + this.resetCachedAllIfReachedCapacity(); + + this.idCache.update(prefixedId, value); + + SchemaElement schema = (SchemaElement) value; + Id prefixedName = generateId(schema.type(), schema.name()); + this.nameCache.update(prefixedName, schema); + } + } + + // update optimized array cache + this.arrayCaches.updateIfNeeded((SchemaElement) value); + + return (T) value; + } + + @Override + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, + String name) { + Id prefixedName = generateId(type, name); + Object value = this.nameCache.get(prefixedName); + if (value == null) { + value = super.getSchema(type, name); + if (value != null) { + this.resetCachedAllIfReachedCapacity(); + + this.nameCache.update(prefixedName, value); + + SchemaElement schema = (SchemaElement) value; + Id prefixedId = generateId(schema.type(), schema.id()); + this.idCache.update(prefixedId, schema); + } + } + return (T) value; + } + + @Override + protected List getAllSchema(HugeType type) { 
+ Boolean cachedAll = this.cachedTypes().getOrDefault(type, false); + List results; + if (cachedAll) { + results = new ArrayList<>(); + // Get from cache + this.idCache.traverse(value -> { + @SuppressWarnings("unchecked") + T schema = (T) value; + if (schema.type() == type) { + results.add(schema); + } + }); + return results; + } else { + results = super.getAllSchema(type); + long free = this.idCache.capacity() - this.idCache.size(); + if (results.size() <= free) { + // Update cache + for (T schema : results) { + Id prefixedId = generateId(schema.type(), schema.id()); + this.idCache.update(prefixedId, schema); + + Id prefixedName = generateId(schema.type(), schema.name()); + this.nameCache.update(prefixedName, schema); + } + this.cachedTypes().putIfAbsent(type, true); + } + return results; + } + } + + @Override + public void clear() { + // Clear schema info firstly + super.clear(); + this.clearCache(false); + } + + private static final class SchemaCaches { + + private final int size; + + private final IntObjectMap pks; + private final IntObjectMap vls; + private final IntObjectMap els; + private final IntObjectMap ils; + + private final CachedTypes cachedTypes; + + public SchemaCaches(int size) { + // TODO: improve size of each type for optimized array cache + this.size = size; + + this.pks = new IntObjectMap<>(size); + this.vls = new IntObjectMap<>(size); + this.els = new IntObjectMap<>(size); + this.ils = new IntObjectMap<>(size); + + this.cachedTypes = new CachedTypes(); + } + + public void updateIfNeeded(V schema) { + if (schema == null) { + return; + } + Id id = schema.id(); + if (id.number() && id.asLong() > 0L) { + this.set(schema.type(), id, schema); + } + } + + @PerfUtil.Watched + public V get(HugeType type, Id id) { + assert id.number(); + long longId = id.asLong(); + if (longId <= 0L) { + assert false : id; + return null; + } + int key = (int) longId; + if (key >= this.size) { + return null; + } + switch (type) { + case PROPERTY_KEY: + return 
this.pks.get(key); + case VERTEX_LABEL: + return this.vls.get(key); + case EDGE_LABEL: + return this.els.get(key); + case INDEX_LABEL: + return this.ils.get(key); + default: + return null; + } + } + + public void set(HugeType type, Id id, V value) { + assert id.number(); + long longId = id.asLong(); + if (longId <= 0L) { + assert false : id; + return; + } + int key = (int) longId; + if (key >= this.size) { + return; + } + switch (type) { + case PROPERTY_KEY: + this.pks.set(key, value); + break; + case VERTEX_LABEL: + this.vls.set(key, value); + break; + case EDGE_LABEL: + this.els.set(key, value); + break; + case INDEX_LABEL: + this.ils.set(key, value); + break; + default: + // pass + break; + } + } + + public void remove(HugeType type, Id id) { + assert id.number(); + long longId = id.asLong(); + if (longId <= 0L) { + return; + } + int key = (int) longId; + V value = null; + if (key >= this.size) { + return; + } + switch (type) { + case PROPERTY_KEY: + this.pks.set(key, value); + break; + case VERTEX_LABEL: + this.vls.set(key, value); + break; + case EDGE_LABEL: + this.els.set(key, value); + break; + case INDEX_LABEL: + this.ils.set(key, value); + break; + default: + // pass + break; + } + } + + public void clear() { + this.pks.clear(); + this.vls.clear(); + this.els.clear(); + this.ils.clear(); + + this.cachedTypes.clear(); + } + + public CachedTypes cachedTypes() { + return this.cachedTypes; + } + } + + private static class CachedTypes + extends ConcurrentHashMap { + + private static final long serialVersionUID = -2215549791679355996L; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdGenerator.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdGenerator.java index c6f77a0f2b..977e26bb82 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdGenerator.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/id/IdGenerator.java @@ -379,7 +379,7 @@ public String toString() { /** * This class is just used by backend store for wrapper object as Id */ - private static final class ObjectId implements Id { + public static final class ObjectId implements Id { private final Object object; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java index 27fc822f30..e6f04a9615 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/Condition.java @@ -17,6 +17,7 @@ package org.apache.hugegraph.backend.query; +import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.Date; @@ -47,7 +48,8 @@ public enum ConditionType { NONE, RELATION, AND, - OR; + OR, + NOT; } public enum RelationType implements BiPredicate { @@ -300,7 +302,8 @@ public boolean isRelation() { public boolean isLogic() { return this.type() == ConditionType.AND || - this.type() == ConditionType.OR; + this.type() == ConditionType.OR || + this.type() == ConditionType.NOT; } public boolean isFlattened() { @@ -315,6 +318,10 @@ public static Condition or(Condition left, Condition right) { return new Or(left, right); } + public static Condition not(Condition condition) { + return new Not(condition); + } + public static Relation eq(HugeKeys key, Object value) { return new SyspropRelation(key, RelationType.EQ, value); } @@ -536,6 +543,79 @@ public Condition copy() { } } + public static class Not extends Condition implements Serializable { + Condition condition; + + public Not(Condition condition) { + super(); + this.condition = condition; + } + + public Condition condition() { + return condition; + } + + @Override + public 
ConditionType type() { + return ConditionType.NOT; + } + + @Override + public boolean test(Object value) { + return !this.condition.test(value); + } + + @Override + public boolean test(HugeElement element) { + return !this.condition.test(element); + } + + @Override + public Condition copy() { + return new Not(this.condition.copy()); + } + + @Override + public boolean isSysprop() { + return this.condition.isSysprop(); + } + + @Override + public List relations() { + return new ArrayList(this.condition.relations()); + } + + @Override + public Condition replace(Relation from, Relation to) { + this.condition = this.condition.replace(from, to); + return this; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(64); + sb.append(this.type().name()).append(' '); + sb.append(this.condition); + return sb.toString(); + } + + @Override + public boolean equals(Object object) { + if (!(object instanceof Not)) { + return false; + } + Not other = (Not) object; + return this.type().equals(other.type()) && + this.condition.equals(other.condition()); + } + + @Override + public int hashCode() { + return this.type().hashCode() ^ + this.condition.hashCode(); + } + } + public abstract static class Relation extends Condition { // Relational operator (like: =, >, <, in, ...) 
@@ -565,6 +645,10 @@ public Object value() { return this.value; } + public void value(Object value) { + this.value = value; + } + public void serialKey(Object key) { this.serialKey = key; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java index 8de686746d..7804388ca2 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/ConditionQuery.java @@ -18,6 +18,7 @@ package org.apache.hugegraph.backend.query; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -34,6 +35,8 @@ import org.apache.hugegraph.backend.id.SplicingIdGenerator; import org.apache.hugegraph.backend.query.Condition.Relation; import org.apache.hugegraph.backend.query.Condition.RelationType; +import org.apache.hugegraph.backend.query.serializer.QueryAdapter; +import org.apache.hugegraph.backend.query.serializer.QueryIdAdapter; import org.apache.hugegraph.perf.PerfUtil.Watched; import org.apache.hugegraph.structure.HugeElement; import org.apache.hugegraph.structure.HugeProperty; @@ -44,9 +47,12 @@ import org.apache.hugegraph.util.InsertionOrderUtil; import org.apache.hugegraph.util.LongEncoding; import org.apache.hugegraph.util.NumericUtil; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; public class ConditionQuery extends IdQuery { @@ -71,6 +77,12 @@ public class ConditionQuery extends IdQuery { private static final List EMPTY_CONDITIONS = ImmutableList.of(); + private static final Gson gson = new GsonBuilder() + 
.registerTypeAdapter(Condition.class, new QueryAdapter()) + .registerTypeAdapter(Id.class, new QueryIdAdapter()) + .setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") + .create(); + // Conditions will be contacted with `and` by default private List conditions = EMPTY_CONDITIONS; @@ -681,6 +693,18 @@ public static String concatValues(Object value) { } } + public static ConditionQuery fromBytes(byte[] bytes) { + Gson gson = new GsonBuilder() + .registerTypeAdapter(Condition.class, new QueryAdapter()) + .registerTypeAdapter(Id.class, new QueryIdAdapter()) + .setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") + .create(); + String cqs = new String(bytes, StandardCharsets.UTF_8); + ConditionQuery conditionQuery = gson.fromJson(cqs, ConditionQuery.class); + + return conditionQuery; + } + private static boolean needConvertNumber(Object value) { // Numeric or date values should be converted to number from string return NumericUtil.isNumber(value) || value instanceof Date; @@ -870,4 +894,9 @@ public interface ResultsFilter { boolean test(HugeElement element); } + + public byte[] bytes() { + String cqs = gson.toJson(this); + return cqs.getBytes(StandardCharsets.UTF_8); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/AbstractSerializerAdapter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/AbstractSerializerAdapter.java new file mode 100644 index 0000000000..1a66ddf074 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/AbstractSerializerAdapter.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.query.serializer; + +import java.lang.reflect.Type; +import java.util.Map; + +import org.apache.hugegraph.backend.BackendException; + +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +// TODO: optimize by binary protocol +public abstract class AbstractSerializerAdapter implements JsonSerializer, + JsonDeserializer { + + //Note: By overriding the method to get the mapping + public abstract Map validType(); + + @Override + public T deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws + JsonParseException { + JsonObject object = json.getAsJsonObject(); + String type = object.get("cls").getAsString(); + JsonElement element = object.get("el"); + try { + return context.deserialize(element, validType().get(type)); + } catch (Exception e) { + throw new BackendException("Unknown element type: " + type, e); + } + } + + /* + * Note: Currently, only the first character of the class name is taken as the key + * to reduce serialization results + * */ + @Override + public JsonElement serialize(T src, Type typeOfSrc, 
JsonSerializationContext context) { + JsonObject result = new JsonObject(); + Class clazz = src.getClass(); + result.add("cls", new JsonPrimitive(clazz.getSimpleName().substring(0, 1).toUpperCase())); + result.add("el", context.serialize(src, clazz)); + return result; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/QueryAdapter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/QueryAdapter.java new file mode 100644 index 0000000000..041a75cba2 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/QueryAdapter.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.query.serializer; + +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; + +import org.apache.hugegraph.backend.query.Condition; +import org.apache.hugegraph.type.define.Directions; + +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.reflect.TypeToken; + +public class QueryAdapter extends AbstractSerializerAdapter { + + static ImmutableMap cls = + ImmutableMap.builder() + // TODO: uncomment later + .put("N", Condition.Not.class) + .put("A", Condition.And.class) + .put("O", Condition.Or.class) + .put("S", Condition.SyspropRelation.class) + .put("U", Condition.UserpropRelation.class) + .build(); + + static boolean isPrimitive(Class clz) { + try { + return (clz == Date.class) || ((Class) clz.getField("TYPE").get(null)).isPrimitive(); + } catch (Exception e) { + return false; + } + } + + @Override + public Map validType() { + return cls; + } + + @Override + public Condition deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) + throws JsonParseException { + Condition condition = super.deserialize(json, typeOfT, context); + if (condition instanceof Condition.Relation) { + JsonObject object = json.getAsJsonObject(); + if (object.has("el")) { + JsonElement elElement = object.get("el"); + JsonElement valueElement = elElement.getAsJsonObject().get("value"); + if (valueElement.isJsonObject()) { + String cls = valueElement.getAsJsonObject().get("cls").getAsString(); + try { + Class actualClass = Class.forName(cls); + Object obj = context.deserialize(valueElement, actualClass); + ((Condition.Relation) condition).value(obj); + } catch 
(ClassNotFoundException e) { + throw new JsonParseException(e.getMessage()); + } + } else if (elElement.getAsJsonObject().has("valuecls")) { + if (valueElement.isJsonArray()) { + String cls = elElement.getAsJsonObject().get("valuecls").getAsString(); + try { + Class actualClass = Class.forName(cls); + Type type = TypeToken.getParameterized(ArrayList.class, actualClass) + .getType(); + Object value = context.deserialize(valueElement, type); + ((Condition.Relation) condition).value(value); + } catch (ClassNotFoundException e) { + throw new JsonParseException(e.getMessage()); + } + } else { + String cls = elElement.getAsJsonObject().get("valuecls").getAsString(); + try { + Class actualClass = Class.forName(cls); + Object obj = context.deserialize(valueElement, actualClass); + ((Condition.Relation) condition).value(obj); + } catch (ClassNotFoundException e) { + throw new JsonParseException(e.getMessage()); + } + } + + } else if (valueElement.isJsonPrimitive() && + valueElement.getAsJsonPrimitive().isString()) { + switch ((String) ((Condition.Relation) condition).value()) { + case "OUT": + ((Condition.Relation) condition).value(Directions.OUT); + break; + case "IN": + ((Condition.Relation) condition).value(Directions.IN); + break; + default: + break; + } + } + } + } + return condition; + } + + @Override + public JsonElement serialize(Condition src, Type typeOfSrc, JsonSerializationContext context) { + JsonElement result = super.serialize(src, typeOfSrc, context); + if (src instanceof Condition.Relation) { + JsonObject object = result.getAsJsonObject(); + JsonElement valueElement = object.get("el").getAsJsonObject().get("value"); + if (valueElement.isJsonObject()) { + valueElement.getAsJsonObject() + .add("cls", + new JsonPrimitive( + ((Condition.Relation) src).value().getClass().getName())); + } else if (isPrimitive(((Condition.Relation) src).value().getClass())) { + object.get("el").getAsJsonObject() + .add("valuecls", + new JsonPrimitive( + ((Condition.Relation) 
src).value().getClass().getName())); + } else if (valueElement.isJsonArray()) { + if (((Condition.Relation) src).value() instanceof List) { + String valueCls = + ((List) ((Condition.Relation) src).value()).get(0).getClass().getName(); + object.get("el").getAsJsonObject().add("valuecls", new JsonPrimitive(valueCls)); + } + } + } + return result; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/QueryIdAdapter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/QueryIdAdapter.java new file mode 100644 index 0000000000..d22db48a29 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/query/serializer/QueryIdAdapter.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.query.serializer; + +import java.lang.reflect.Type; +import java.util.Map; + +import org.apache.hugegraph.backend.id.EdgeId; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.serializer.BinaryBackendEntry; + +import com.google.common.collect.ImmutableMap; + +public class QueryIdAdapter extends AbstractSerializerAdapter { + + static ImmutableMap cls = + ImmutableMap.builder() + .put("E", EdgeId.class) + .put("S", IdGenerator.StringId.class) + .put("L", IdGenerator.LongId.class) + .put("U", IdGenerator.UuidId.class) + .put("O", IdGenerator.ObjectId.class) + .put("B", BinaryBackendEntry.BinaryId.class) + .build(); + + @Override + public Map validType() { + return cls; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java index 6be90d894c..c3c9d3e232 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/AbstractSerializer.java @@ -17,14 +17,19 @@ package org.apache.hugegraph.backend.serializer; +import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.backend.BackendException; +import org.apache.hugegraph.backend.id.EdgeId; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.IdQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.iterator.CIter; +import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.config.HugeConfig; +import 
org.apache.tinkerpop.gremlin.structure.Edge; public abstract class AbstractSerializer implements GraphSerializer, SchemaSerializer { @@ -89,4 +94,8 @@ public Query writeQuery(Query query) { return query; } + + public CIter readEdges(HugeGraph graph, BackendEntry bytesEntry) { + throw new RuntimeException("Method not implemented error."); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryBackendEntry.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryBackendEntry.java index c801e16a3b..cd786a0162 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryBackendEntry.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinaryBackendEntry.java @@ -199,7 +199,7 @@ public int hashCode() { return this.id().hashCode() ^ this.columns.size(); } - protected static final class BinaryId implements Id { + public static final class BinaryId implements Id { private final byte[] bytes; private final Id id; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java index 92809e823c..c33168b009 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BinarySerializer.java @@ -17,6 +17,8 @@ package org.apache.hugegraph.backend.serializer; +import static org.apache.hugegraph.schema.SchemaElement.UNDEF; + import java.util.Arrays; import java.util.Collection; import java.util.Iterator; @@ -33,7 +35,10 @@ import org.apache.hugegraph.backend.page.PageState; import org.apache.hugegraph.backend.store.BackendEntry; import 
org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; +import org.apache.hugegraph.iterator.CIter; +import org.apache.hugegraph.iterator.MapperIterator; import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.EdgeLabelType; import org.apache.hugegraph.util.*; import org.apache.hugegraph.backend.query.Condition; import org.apache.hugegraph.backend.query.Condition.RangeConditions; @@ -68,6 +73,7 @@ import org.apache.hugegraph.type.define.WriteType; import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.util.StringEncoding; +import org.apache.tinkerpop.gremlin.structure.Edge; public class BinarySerializer extends AbstractSerializer { @@ -523,6 +529,40 @@ public HugeEdge readEdge(HugeGraph graph, BackendEntry bytesEntry) { return edges.iterator().next(); } + @Override + public CIter readEdges(HugeGraph graph, BackendEntry bytesEntry) { + + BinaryBackendEntry entry = this.convertEntry(bytesEntry); + + // Parse id + Id id = entry.id().origin(); + Id vid = id.edge() ? 
((EdgeId) id).ownerVertexId() : id; + HugeVertex vertex = new HugeVertex(graph, vid, VertexLabel.NONE); + + // Parse all properties and edges of a Vertex + Iterator iterator = entry.columns().iterator(); + for (int index = 0; iterator.hasNext(); index++) { + BackendColumn col = iterator.next(); + if (entry.type().isEdge()) { + // NOTE: the entry id type is vertex even if entry type is edge + // Parse vertex edges + this.parseColumn(col, vertex); + } else { + assert entry.type().isVertex(); + // Parse vertex properties + assert entry.columnsSize() >= 1 : entry.columnsSize(); + if (index == 0) { + this.parseVertex(col.value, vertex); + } else { + this.parseVertexOlap(col.value, vertex); + } + } + } + // convert to CIter + return new MapperIterator<>(vertex.getEdges().iterator(), + (edge) -> edge); + } + @Override public BackendEntry writeIndex(HugeIndex index) { BinaryBackendEntry entry; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java index da66cc4f3e..52534bf0bd 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java @@ -597,6 +597,10 @@ public void writeProperty(DataType dataType, Object value) { } } + public static byte getType(int value) { + return (byte) (value & 0x3f); + } + public Object readProperty(DataType dataType) { switch (dataType) { case BOOLEAN: @@ -752,11 +756,11 @@ public BytesBuffer writeIndexId(Id id, HugeType type, boolean withEnding) { public BinaryId readIndexId(HugeType type) { byte[] id; if (type.isRange4Index()) { - // IndexLabel 4 bytes + fieldValue 4 bytes - id = this.read(8); + // HugeTypeCode 1 byte + IndexLabel 4 bytes + fieldValue 4 bytes + id = this.read(9); } else if (type.isRange8Index()) { - // 
IndexLabel 4 bytes + fieldValue 8 bytes - id = this.read(12); + // HugeTypeCode 1 byte + IndexLabel 4 bytes + fieldValue 8 bytes + id = this.read(13); } else { assert type.isStringIndex(); id = this.readBytesWithEnding(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java index c67b597486..052c3e1196 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/GraphSerializer.java @@ -22,12 +22,14 @@ import org.apache.hugegraph.backend.query.ConditionQuery; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.iterator.CIter; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.structure.HugeEdge; import org.apache.hugegraph.structure.HugeEdgeProperty; import org.apache.hugegraph.structure.HugeIndex; import org.apache.hugegraph.structure.HugeVertex; import org.apache.hugegraph.structure.HugeVertexProperty; +import org.apache.tinkerpop.gremlin.structure.Edge; public interface GraphSerializer { @@ -44,6 +46,7 @@ public interface GraphSerializer { BackendEntry writeEdgeProperty(HugeEdgeProperty prop); HugeEdge readEdge(HugeGraph graph, BackendEntry entry); + CIter readEdges(HugeGraph graph, BackendEntry bytesEntry); BackendEntry writeIndex(HugeIndex index); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java index 26efef6e2b..7a11d18e2f 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/TextSerializer.java @@ -30,6 +30,7 @@ import org.apache.hugegraph.config.HugeConfig; import org.apache.commons.lang.NotImplementedException; +import org.apache.hugegraph.iterator.CIter; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.JsonUtil; import org.apache.hugegraph.backend.id.EdgeId; @@ -66,6 +67,8 @@ import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.type.define.WriteType; import org.apache.hugegraph.util.E; +import org.apache.tinkerpop.gremlin.structure.Edge; + import com.google.common.collect.ImmutableMap; public class TextSerializer extends AbstractSerializer { @@ -352,6 +355,13 @@ public HugeEdge readEdge(HugeGraph graph, BackendEntry backendEntry) { throw new NotImplementedException("Unsupported readEdge()"); } + @Override + public CIter readEdges(HugeGraph graph, BackendEntry bytesEntry) { + E.checkNotNull(graph, "serializer graph"); + // TODO: implement + throw new NotImplementedException("Unsupported readEdges()"); + } + @Override public BackendEntry writeIndex(HugeIndex index) { TextBackendEntry entry = newBackendEntry(index.type(), index.id()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java index 4e87ff589a..00fdcb929a 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/AbstractBackendStore.java @@ -80,6 +80,13 @@ public String toString() { protected abstract BackendTable table(HugeType type); + protected static HugeType convertTaskOrServerToVertex(HugeType type) { + if (HugeType.TASK.equals(type) || HugeType.SERVER.equals(type)) { + return HugeType.VERTEX; + } + return 
type; + } + // NOTE: Need to support passing null protected abstract Session session(HugeType type); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendFeatures.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendFeatures.java index dc32653d74..53ca20bfb5 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendFeatures.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendFeatures.java @@ -31,6 +31,8 @@ default boolean supportsSnapshot() { return false; } + default boolean supportsTaskAndServerVertex() { return false; } + boolean supportsScanToken(); boolean supportsScanKeyPrefix(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java index 8004f987c4..7fb75f5918 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendMutation.java @@ -341,4 +341,8 @@ public void clear() { this.mutations.clear(); } } + + public Map>> mutations() { + return this.updates.mutations; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java index 966d21fe88..77d4fd51bf 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreInfo.java @@ -41,9 +41,14 @@ public boolean exists() { } public boolean checkVersion() { + BackendStore store; + if (this.storeProvider.isHstore()) 
{ + store = this.storeProvider.loadGraphStore(this.config); + } else { + store = this.storeProvider.loadSystemStore(this.config); + } String driverVersion = this.storeProvider.driverVersion(); - String storedVersion = this.storeProvider.loadSystemStore(this.config) - .storedVersion(); + String storedVersion = store.storedVersion(); if (!driverVersion.equals(storedVersion)) { LOG.error("The backend driver version '{}' is inconsistent with " + "the data version '{}' of backend store for graph '{}'", diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java index a5e80510ed..0117e0bc74 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/BackendStoreProvider.java @@ -73,4 +73,8 @@ public interface BackendStoreProvider { void onCloneConfig(HugeConfig config, String newGraph); void onDeleteConfig(HugeConfig config); + + default boolean isHstore() { + return "hstore".equals(type()); + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java index 4a088637a0..1e31a38d36 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/store/memory/InMemoryDBStore.java @@ -27,10 +27,6 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.query.Query; import org.apache.hugegraph.backend.serializer.TextBackendEntry; -import org.apache.hugegraph.type.HugeType; -import org.apache.hugegraph.type.define.Action; -import 
org.slf4j.Logger; - import org.apache.hugegraph.backend.store.AbstractBackendStore; import org.apache.hugegraph.backend.store.BackendAction; import org.apache.hugegraph.backend.store.BackendEntry; @@ -39,7 +35,9 @@ import org.apache.hugegraph.backend.store.BackendSession; import org.apache.hugegraph.backend.store.BackendStoreProvider; import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; /** * NOTE: @@ -95,7 +93,7 @@ protected Collection tables() { @Override protected final InMemoryDBTable table(HugeType type) { assert type != null; - InMemoryDBTable table = this.tables.get(type); + InMemoryDBTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table type: %s", type); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java index bc0bc0be11..1ff4a9bb34 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphIndexTransaction.java @@ -191,7 +191,7 @@ private void updateVertexOlapIndex(HugeVertex vertex, boolean removed) { * @param removed remove or add index */ protected void updateIndex(Id ilId, HugeElement element, boolean removed) { - SchemaTransaction schema = this.params().schemaTransaction(); + ISchemaTransaction schema = this.params().schemaTransaction(); IndexLabel indexLabel = schema.getIndexLabel(ilId); E.checkArgument(indexLabel != null, "Not exist index label with id '%s'", ilId); @@ -730,7 +730,7 @@ private PageIds doIndexQueryOnce(IndexLabel indexLabel, @Watched(prefix = "index") private Set collectMatchedIndexes(ConditionQuery query) { - SchemaTransaction schema = 
this.params().schemaTransaction(); + ISchemaTransaction schema = this.params().schemaTransaction(); Id label = query.condition(HugeKeys.LABEL); List schemaLabels; @@ -780,7 +780,7 @@ private Set collectMatchedIndexes(ConditionQuery query) { @Watched(prefix = "index") private MatchedIndex collectMatchedIndex(SchemaLabel schemaLabel, ConditionQuery query) { - SchemaTransaction schema = this.params().schemaTransaction(); + ISchemaTransaction schema = this.params().schemaTransaction(); Set ils = InsertionOrderUtil.newSet(); for (Id il : schemaLabel.indexLabels()) { IndexLabel indexLabel = schema.getIndexLabel(il); @@ -1748,7 +1748,9 @@ protected long removeIndexLeft(ConditionQuery query, HugeElement element) { if (element.type() != HugeType.VERTEX && element.type() != HugeType.EDGE_OUT && - element.type() != HugeType.EDGE_IN) { + element.type() != HugeType.EDGE_IN && + element.type() != HugeType.TASK && + element.type() != HugeType.SERVER) { throw new HugeException("Only accept element of type VERTEX " + "and EDGE to remove left index, " + "but got: '%s'", element.type()); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java index ea1196eaef..7b5289237b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/GraphTransaction.java @@ -734,16 +734,50 @@ public Vertex queryVertex(Object vertexId) { return vertex; } + public Iterator queryTaskInfos(Query query) { + return this.queryVertices(query); + } + + public Iterator queryTaskInfos(Object... 
vertexIds) { + if (this.graph().backendStoreFeatures().supportsTaskAndServerVertex()) { + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.TASK); + } + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.VERTEX); + } + + public Iterator queryServerInfos(Query query) { + return this.queryVertices(query); + } + + public Iterator queryServerInfos(Object... vertexIds) { + if (this.graph().backendStoreFeatures().supportsTaskAndServerVertex()) { + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.SERVER); + } + return this.queryVerticesByIds(vertexIds, false, false, + HugeType.VERTEX); + } + + protected Iterator queryVerticesByIds(Object[] vertexIds, + boolean adjacentVertex, + boolean checkMustExist) { + return this.queryVerticesByIds(vertexIds, adjacentVertex, checkMustExist, + HugeType.VERTEX); + } + protected Iterator queryVerticesByIds(Object[] vertexIds, boolean adjacentVertex, - boolean checkMustExist) { + boolean checkMustExist, + HugeType type) { Query.checkForceCapacity(vertexIds.length); // NOTE: allowed duplicated vertices if query by duplicated ids List ids = InsertionOrderUtil.newList(); Map vertices = new HashMap<>(vertexIds.length); - IdQuery query = new IdQuery(HugeType.VERTEX); + IdQuery query = new IdQuery(type); for (Object vertexId : vertexIds) { HugeVertex vertex; Id id = HugeVertex.getIdValue(vertexId); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java new file mode 100644 index 0000000000..1f77e55ad9 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/ISchemaTransaction.java @@ -0,0 +1,92 @@ +package org.apache.hugegraph.backend.tx; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import org.apache.hugegraph.backend.id.Id; +import 
org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.schema.SchemaLabel; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.GraphMode; +import org.apache.hugegraph.type.define.SchemaStatus; + +public interface ISchemaTransaction { + List getPropertyKeys(); + + Id removePropertyKey(Id pkey); + + PropertyKey getPropertyKey(Id id); + + PropertyKey getPropertyKey(String name); + + Id clearOlapPk(PropertyKey propertyKey); + + void addVertexLabel(VertexLabel label); + + void updateVertexLabel(VertexLabel label); + + Id removeVertexLabel(Id label); + + List getVertexLabels(); + + VertexLabel getVertexLabel(Id id); + + VertexLabel getVertexLabel(String name); + + List getEdgeLabels(); + + Id addPropertyKey(PropertyKey pkey); + + void updatePropertyKey(PropertyKey pkey); + + void updateEdgeLabel(EdgeLabel label); + + void addEdgeLabel(EdgeLabel label); + + Id removeEdgeLabel(Id id); + + EdgeLabel getEdgeLabel(Id id); + + EdgeLabel getEdgeLabel(String name); + + void addIndexLabel(SchemaLabel schemaLabel, IndexLabel indexLabel); + + void updateIndexLabel(IndexLabel label); + + Id removeIndexLabel(Id id); + + Id rebuildIndex(SchemaElement schema); + + Id rebuildIndex(SchemaElement schema, Set dependencies); + + List getIndexLabels(); + + IndexLabel getIndexLabel(Id id); + + IndexLabel getIndexLabel(String name); + + void close(); + + Id getNextId(HugeType type); + + Id validOrGenerateId(HugeType type, Id id, String name); + + void checkSchemaName(String name); + + String graphName(); + + void updateSchemaStatus(SchemaElement element, SchemaStatus status); + + GraphMode graphMode(); + + boolean existsSchemaId(HugeType type, Id id); + + void removeIndexLabelFromBaseLabel(IndexLabel indexLabel); + + void 
createIndexLabelForOlapPk(PropertyKey propertyKey); +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java new file mode 100644 index 0000000000..87a10ce2c8 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/IdCounter.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
package org.apache.hugegraph.backend.tx;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.hugegraph.backend.BackendException;
import org.apache.hugegraph.backend.id.Id;
import org.apache.hugegraph.backend.id.IdGenerator;
import org.apache.hugegraph.pd.client.PDClient;
import org.apache.hugegraph.pd.grpc.Pdpb;
import org.apache.hugegraph.store.term.HgPair;
import org.apache.hugegraph.type.HugeType;
import org.apache.hugegraph.util.E;

/**
 * Schema-id allocator backed by PD (placement driver).
 *
 * <p>Ids are fetched from PD in segments of {@link #DELTA} and served locally
 * from an in-memory cache shared by all {@code IdCounter} instances in this
 * JVM. Each cache entry maps a counter key ({@code graphName + "/" + typeCode})
 * to a pair {@code (currentId, maxId)}: {@code currentId} is the last id
 * handed out, {@code maxId} the (inclusive) upper bound of the cached segment.
 */
public class IdCounter {

    // Max rounds to retry when racing other threads for a fresh segment
    private static final int TIMES = 10000;
    // Segment size requested from PD per allocation
    private static final int DELTA = 10000;
    private static final String DELIMITER = "/";

    // Shared per-JVM cache: counter key -> (currentId, maxId)
    private static final Map<String, HgPair<AtomicLong, AtomicLong>> ids =
                         new ConcurrentHashMap<>();

    private final PDClient pdClient;
    private final String graphName;

    public IdCounter(PDClient pdClient, String graphName) {
        this.graphName = graphName;
        this.pdClient = pdClient;
    }

    /**
     * Allocates and returns the next id for the given schema type.
     *
     * @throws IllegalStateException if PD handed out an invalid (zero) id
     */
    public Id nextId(HugeType type) {
        long counter = this.getCounter(type);
        E.checkState(counter != 0L, "Please check whether '%s' is OK",
                     this.pdClient.toString());
        return IdGenerator.of(counter);
    }

    /**
     * Ensures the counter of the given type is at least {@code lowest}
     * (used when restoring schema with explicit ids).
     */
    public void setCounterLowest(HugeType type, long lowest) {
        long current = this.getCounter(type);
        if (current >= lowest) {
            return;
        }
        /*
         * BUG FIX: increaseCounter() takes the target watermark, not an
         * increment. The old code passed (lowest - current), so whenever
         * lowest < 2 * current the counter was silently never raised,
         * risking duplicate schema ids after a restore.
         */
        this.increaseCounter(type, lowest);
    }

    /** Returns the current counter value for the given type. */
    public long getCounter(HugeType type) {
        return this.getCounterFromPd(type);
    }

    /**
     * Raises the counter of the given type up to the target watermark
     * {@code lowest}; no-op if the counter is already at or beyond it.
     */
    public synchronized void increaseCounter(HugeType type, long lowest) {
        String key = toKey(this.graphName, type);
        // Ensure the local segment for this key is initialized/refreshed
        this.getCounterFromPd(type);
        HgPair<AtomicLong, AtomicLong> idPair = ids.get(key);
        AtomicLong currentId = idPair.getKey();
        AtomicLong maxId = idPair.getValue();
        if (currentId.longValue() >= lowest) {
            return;
        }
        if (maxId.longValue() >= lowest) {
            // Target lies inside the cached segment: just advance locally
            currentId.set(lowest);
            return;
        }
        synchronized (ids) {
            try {
                /*
                 * Consume ids from PD up to the watermark, then drop the
                 * stale cached segment so it is re-fetched lazily.
                 * toIntExact() makes a watermark overflowing int fail loudly
                 * instead of silently wrapping like the old (int) cast.
                 */
                this.pdClient.getIdByKey(key,
                                         Math.toIntExact(lowest - maxId.longValue()));
                ids.remove(key);
            } catch (Exception e) {
                throw new BackendException(e);
            }
        }
    }

    /** Builds the PD counter key: {@code graphName + "/" + type.code()}. */
    protected String toKey(String graphName, HugeType type) {
        return graphName + DELIMITER + type.code();
    }

    /**
     * Hands out the next id from the locally cached segment, fetching a new
     * segment of {@link #DELTA} ids from PD when the cache is exhausted.
     *
     * @throws BackendException if PD cannot be reached
     * @throws IllegalArgumentException after {@link #TIMES} fruitless rounds
     */
    public long getCounterFromPd(HugeType type) {
        String key = toKey(this.graphName, type);
        // Atomic lazy init of the per-key segment cache (replaces the old
        // hand-rolled double-checked locking)
        HgPair<AtomicLong, AtomicLong> idPair = ids.computeIfAbsent(
                key, k -> new HgPair<>(new AtomicLong(0), new AtomicLong(0)));
        AtomicLong currentId = idPair.getKey();
        AtomicLong maxId = idPair.getValue();
        for (int i = 0; i < TIMES; i++) {
            synchronized (currentId) {
                if (currentId.incrementAndGet() <= maxId.longValue()) {
                    return currentId.longValue();
                }
                // Segment exhausted: fetch a fresh [id, id + delta] range
                if (currentId.longValue() > maxId.longValue()) {
                    try {
                        Pdpb.GetIdResponse idByKey = pdClient.getIdByKey(key, DELTA);
                        maxId.getAndSet(idByKey.getId() + idByKey.getDelta());
                        currentId.getAndSet(idByKey.getId());
                    } catch (Exception e) {
                        // Keep the cause chain (the old code flattened the
                        // exception into the message and lost the stack trace)
                        throw new BackendException(
                              "Failed to get the ID from pd", e);
                    }
                }
            }
        }
        E.checkArgument(false,
                        "Having made too many attempts to get the" +
                        " ID for type '%s'", type.name());
        return 0L; // unreachable: checkArgument(false, ...) always throws
    }
}
org.apache.hugegraph.util.LockUtil; import com.google.common.collect.ImmutableSet; -public class SchemaTransaction extends IndexableTransaction { +public class SchemaTransaction extends IndexableTransaction implements ISchemaTransaction { private final SchemaIndexTransaction indexTx; private final SystemSchemaStore systemSchemaStore; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java new file mode 100644 index 0000000000..8aa9272d6e --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/backend/tx/SchemaTransactionV2.java @@ -0,0 +1,735 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.tx; + +import java.util.List; +import java.util.Set; +import java.util.function.Consumer; + +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.HugeGraphParams; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.exception.NotAllowException; +import org.apache.hugegraph.job.JobBuilder; +import org.apache.hugegraph.job.schema.EdgeLabelRemoveJob; +import org.apache.hugegraph.job.schema.IndexLabelRebuildJob; +import org.apache.hugegraph.job.schema.IndexLabelRemoveJob; +import org.apache.hugegraph.job.schema.OlapPropertyKeyClearJob; +import org.apache.hugegraph.job.schema.OlapPropertyKeyCreateJob; +import org.apache.hugegraph.job.schema.OlapPropertyKeyRemoveJob; +import org.apache.hugegraph.job.schema.SchemaJob; +import org.apache.hugegraph.job.schema.VertexLabelRemoveJob; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.meta.PdMetaDriver; +import org.apache.hugegraph.meta.managers.SchemaMetaManager; +import org.apache.hugegraph.perf.PerfUtil.Watched; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.SchemaElement; +import org.apache.hugegraph.schema.SchemaLabel; +import org.apache.hugegraph.schema.Userdata; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.task.HugeTask; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.GraphMode; +import org.apache.hugegraph.type.define.SchemaStatus; +import org.apache.hugegraph.type.define.WriteType; +import org.apache.hugegraph.util.DateUtil; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.LockUtil; +import org.apache.hugegraph.util.Log; +import 
org.apache.tinkerpop.gremlin.structure.Graph; +import org.slf4j.Logger; + +import com.google.common.collect.ImmutableSet; + +public class SchemaTransactionV2 implements ISchemaTransaction { + + protected static final Logger LOG = Log.logger(SchemaTransaction.class); + + private final String graphSpace; + private final String graph; + private final HugeGraphParams graphParams; + private final IdCounter idCounter; + private final SchemaMetaManager schemaMetaManager; + + public SchemaTransactionV2(MetaDriver metaDriver, + String cluster, + HugeGraphParams graphParams) { + E.checkNotNull(graphParams, "graphParams"); + this.graphParams = graphParams; + // TODO: uncomment later - graph space + //this.graphSpace = graphParams.graph().graphSpace(); + this.graphSpace = ""; + this.graph = graphParams.name(); + this.schemaMetaManager = + new SchemaMetaManager(metaDriver, cluster, this.graph()); + this.idCounter = new IdCounter(((PdMetaDriver) metaDriver).pdClient(), + idKeyName(this.graphSpace, this.graph)); + } + + private static void setCreateTimeIfNeeded(SchemaElement schema) { + if (!schema.userdata().containsKey(Userdata.CREATE_TIME)) { + schema.userdata(Userdata.CREATE_TIME, DateUtil.now()); + } + } + + /** + * 异步任务系列 + */ + private static Id asyncRun(HugeGraph graph, SchemaElement schema, + SchemaJob job) { + return asyncRun(graph, schema, job, ImmutableSet.of()); + } + + @Watched(prefix = "schema") + private static Id asyncRun(HugeGraph graph, SchemaElement schema, + SchemaJob job, Set dependencies) { + E.checkArgument(schema != null, "Schema can't be null"); + String name = SchemaJob.formatTaskName(schema.type(), + schema.id(), + schema.name()); + + JobBuilder builder = JobBuilder.of(graph).name(name) + .job(job) + .dependencies(dependencies); + HugeTask task = builder.schedule(); + // If TASK_SYNC_DELETION is true, wait async thread done before + // continue. This is used when running tests. 
+ if (graph.option(CoreOptions.TASK_SYNC_DELETION)) { + task.syncWait(); + } + return task.id(); + } + + public String idKeyName(String graphSpace, String graph) { + // {graphSpace}/{graph}/m "m" means "schema" + return String.join("/", graphSpace, graph, "m"); + } + + @Watched(prefix = "schema") + public List getPropertyKeys(boolean cache) { + return this.getAllSchema(HugeType.PROPERTY_KEY); + } + + @Watched(prefix = "schema") + public List getPropertyKeys() { + return this.getAllSchema(HugeType.PROPERTY_KEY); + } + + @Watched(prefix = "schema") + public List getVertexLabels() { + return this.getAllSchema(HugeType.VERTEX_LABEL); + } + + @Watched(prefix = "schema") + public List getEdgeLabels() { + return this.getAllSchema(HugeType.EDGE_LABEL); + } + + @Watched(prefix = "schema") + public List getIndexLabels() { + return this.getAllSchema(HugeType.INDEX_LABEL); + } + + @Watched(prefix = "schema") + public Id addPropertyKey(PropertyKey propertyKey) { + this.addSchema(propertyKey); + if (!propertyKey.olap()) { + return IdGenerator.ZERO; + } + return this.createOlapPk(propertyKey); + } + + @Watched(prefix = "schema") + public void updatePropertyKey(PropertyKey propertyKey) { + this.updateSchema(propertyKey, null); + } + + public void updatePropertyKey(PropertyKey old, PropertyKey update) { + this.removePropertyKey(old.id()); + this.addPropertyKey(update); + } + + @Watched(prefix = "schema") + public PropertyKey getPropertyKey(Id id) { + E.checkArgumentNotNull(id, "Property key id can't be null"); + return this.getSchema(HugeType.PROPERTY_KEY, id); + } + + @Watched(prefix = "schema") + public PropertyKey getPropertyKey(String name) { + E.checkArgumentNotNull(name, "Property key name can't be null"); + E.checkArgument(!name.isEmpty(), "Property key name can't be empty"); + return this.getSchema(HugeType.PROPERTY_KEY, name); + } + + @Watched(prefix = "schema") + public Id removePropertyKey(Id id) { + LOG.debug("SchemaTransaction remove property key '{}'", id); + 
PropertyKey propertyKey = this.getPropertyKey(id); + // If the property key does not exist, return directly + if (propertyKey == null) { + return null; + } + + List vertexLabels = this.getVertexLabels(); + for (VertexLabel vertexLabel : vertexLabels) { + if (vertexLabel.properties().contains(id)) { + throw new NotAllowException( + "Not allowed to remove property key: '%s' " + + "because the vertex label '%s' is still using it.", + propertyKey, vertexLabel.name()); + } + } + + List edgeLabels = this.getEdgeLabels(); + for (EdgeLabel edgeLabel : edgeLabels) { + if (edgeLabel.properties().contains(id)) { + throw new NotAllowException( + "Not allowed to remove property key: '%s' " + + "because the edge label '%s' is still using it.", + propertyKey, edgeLabel.name()); + } + } + if (propertyKey.oltp()) { + this.removeSchema(propertyKey); + return IdGenerator.ZERO; + } else { + return this.removeOlapPk(propertyKey); + } + } + + @Watched(prefix = "schema") + public void addVertexLabel(VertexLabel vertexLabel) { + this.addSchema(vertexLabel); + } + + @Watched(prefix = "schema") + public void updateVertexLabel(VertexLabel vertexLabel) { + this.updateSchema(vertexLabel, null); + } + + @Watched(prefix = "schema") + public VertexLabel getVertexLabel(Id id) { + E.checkArgumentNotNull(id, "Vertex label id can't be null"); + if (SchemaElement.OLAP_ID.equals(id)) { + return VertexLabel.OLAP_VL; + } + return this.getSchema(HugeType.VERTEX_LABEL, id); + } + + @Watched(prefix = "schema") + public VertexLabel getVertexLabel(String name) { + E.checkArgumentNotNull(name, "Vertex label name can't be null"); + E.checkArgument(!name.isEmpty(), "Vertex label name can't be empty"); + if (SchemaElement.OLAP.equals(name)) { + return VertexLabel.OLAP_VL; + } + return this.getSchema(HugeType.VERTEX_LABEL, name); + } + + @Watched(prefix = "schema") + public Id removeVertexLabel(Id id) { + LOG.debug("SchemaTransaction remove vertex label '{}'", id); + SchemaJob job = new VertexLabelRemoveJob(); + 
VertexLabel schema = this.getVertexLabel(id); + return asyncRun(this.graph(), schema, job); + } + + @Watched(prefix = "schema") + public void addEdgeLabel(EdgeLabel edgeLabel) { + this.addSchema(edgeLabel); + } + + @Watched(prefix = "schema") + public void updateEdgeLabel(EdgeLabel edgeLabel) { + this.updateSchema(edgeLabel, null); + } + + @Watched(prefix = "schema") + public EdgeLabel getEdgeLabel(Id id) { + E.checkArgumentNotNull(id, "Edge label id can't be null"); + return this.getSchema(HugeType.EDGE_LABEL, id); + } + + @Watched(prefix = "schema") + public EdgeLabel getEdgeLabel(String name) { + E.checkArgumentNotNull(name, "Edge label name can't be null"); + E.checkArgument(!name.isEmpty(), "Edge label name can't be empty"); + return this.getSchema(HugeType.EDGE_LABEL, name); + } + + @Watched(prefix = "schema") + public Id removeEdgeLabel(Id id) { + /* + * Call an asynchronous task and call back the corresponding + * removeSchema() method after the task ends to complete the delete + * schema operation + */ + LOG.debug("SchemaTransaction remove edge label '{}'", id); + EdgeLabel schema = this.getEdgeLabel(id); + // TODO: uncomment later - el + //if (schema.edgeLabelType().parent()) { + // List edgeLabels = this.getEdgeLabels(); + // for (EdgeLabel edgeLabel : edgeLabels) { + // if (edgeLabel.edgeLabelType().sub() && + // edgeLabel.fatherId() == id) { + // throw new NotAllowException( + // "Not allowed to remove a parent edge label: '%s' " + + // "because the sub edge label '%s' is still existing", + // schema.name(), edgeLabel.name()); + // } + // } + //} + SchemaJob job = new EdgeLabelRemoveJob(); + return asyncRun(this.graph(), schema, job); + } + + @Watched(prefix = "schema") + public void addIndexLabel(SchemaLabel baseLabel, IndexLabel indexLabel) { + /* + * Create index and update index name in base-label(VL/EL) + * TODO: should wrap update base-label and create index in one tx. 
+ */ + this.addSchema(indexLabel); + + if (baseLabel.equals(VertexLabel.OLAP_VL)) { + return; + } + + this.updateSchema(baseLabel, schema -> { + // NOTE: Do schema update in the lock block + baseLabel.addIndexLabel(indexLabel.id()); + }); + } + + @Watched(prefix = "schema") + public void updateIndexLabel(IndexLabel indexLabel) { + this.updateSchema(indexLabel, null); + } + + @Watched(prefix = "schema") + public IndexLabel getIndexLabel(Id id) { + E.checkArgumentNotNull(id, "Index label id can't be null"); + return this.getSchema(HugeType.INDEX_LABEL, id); + } + + @Watched(prefix = "schema") + public IndexLabel getIndexLabel(String name) { + E.checkArgumentNotNull(name, "Index label name can't be null"); + E.checkArgument(!name.isEmpty(), "Index label name can't be empty"); + return this.getSchema(HugeType.INDEX_LABEL, name); + } + + @Override + public void close() { + + } + + @Watched(prefix = "schema") + public Id removeIndexLabel(Id id) { + LOG.debug("SchemaTransaction remove index label '{}'", id); + SchemaJob job = new IndexLabelRemoveJob(); + IndexLabel schema = this.getIndexLabel(id); + return asyncRun(this.graph(), schema, job); + } + + // 通用性 的schema处理函数 + @Watched(prefix = "schema") + public void updateSchemaStatus(SchemaElement schema, SchemaStatus status) { + if (!this.existsSchemaId(schema.type(), schema.id())) { + LOG.warn("Can't update schema '{}', it may be deleted", schema); + return; + } + + this.updateSchema(schema, schemaToUpdate -> { + // NOTE: Do schema update in the lock block + schema.status(status); + }); + } + + @Watched(prefix = "schema") + public boolean existsSchemaId(HugeType type, Id id) { + return this.getSchema(type, id) != null; + } + + @Override + public void removeIndexLabelFromBaseLabel(IndexLabel indexLabel) { + + } + + protected void updateSchema(SchemaElement schema, + Consumer updateCallback) { + LOG.debug("SchemaTransaction update {} with id '{}'", + schema.type(), schema.id()); + this.saveSchema(schema, true, 
updateCallback); + } + + protected void addSchema(SchemaElement schema) { + LOG.debug("SchemaTransaction add {} with id '{}'", + schema.type(), schema.id()); + setCreateTimeIfNeeded(schema); + this.saveSchema(schema, false, null); + } + + @SuppressWarnings("unchecked") + private void saveSchema(SchemaElement schema, boolean update, + Consumer updateCallback) { + // Lock for schema update + // TODO: uncomment later - graph space + //String spaceGraph = this.graphParams() + // .graph().spaceGraphName(); + LockUtil.Locks locks = new LockUtil.Locks(graph); + try { + locks.lockWrites(LockUtil.hugeType2Group(schema.type()), schema.id()); + + if (updateCallback != null) { + // NOTE: Do schema update in the lock block + updateCallback.accept(schema); + } + // 调对应的方法 + switch (schema.type()) { + case PROPERTY_KEY: + this.schemaMetaManager.addPropertyKey(this.graphSpace, + this.graph, + (PropertyKey) schema); + break; + case VERTEX_LABEL: + this.schemaMetaManager.addVertexLabel(this.graphSpace, + this.graph, + (VertexLabel) schema); + // 点的label发生变化, 清空对应图的点缓存信息 + MetaManager.instance().notifyGraphVertexCacheClear(this.graphSpace, this.graph); + break; + case EDGE_LABEL: + this.schemaMetaManager.addEdgeLabel(this.graphSpace, + this.graph, + (EdgeLabel) schema); + // 边的label发生变化, 清空对应图的边缓存信息 + MetaManager.instance().notifyGraphEdgeCacheClear(this.graphSpace, this.graph); + break; + case INDEX_LABEL: + this.schemaMetaManager.addIndexLabel(this.graphSpace, + this.graph, + (IndexLabel) schema); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for saveSchema", schema.type())); + } + } finally { + locks.unlock(); + } + } + + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, Id id) { + LOG.debug("SchemaTransaction get {} by id '{}'", + type.readableName(), id); + switch (type) { + case PROPERTY_KEY: + return (T) this.schemaMetaManager.getPropertyKey(this.graphSpace, + this.graph, id); + case VERTEX_LABEL: + return (T) 
this.schemaMetaManager.getVertexLabel(this.graphSpace, + this.graph, id); + case EDGE_LABEL: + return (T) this.schemaMetaManager.getEdgeLabel(this.graphSpace, + this.graph, id); + case INDEX_LABEL: + return (T) this.schemaMetaManager.getIndexLabel(this.graphSpace, + this.graph, id); + default: + throw new AssertionError(String.format( + "Invalid type '%s' for getSchema", type)); + } + } + + /** + * Currently doesn't allow to exist schema with the same name + * + * @param type the query schema type + * @param name the query schema name + * @param SubClass of SchemaElement + * @return the queried schema object + */ + @SuppressWarnings("unchecked") + protected T getSchema(HugeType type, String name) { + LOG.debug("SchemaTransaction get {} by name '{}'", + type.readableName(), name); + switch (type) { + case PROPERTY_KEY: + return (T) this.schemaMetaManager.getPropertyKey(this.graphSpace, + this.graph, name); + case VERTEX_LABEL: + return (T) this.schemaMetaManager.getVertexLabel(this.graphSpace, + this.graph, name); + case EDGE_LABEL: + return (T) this.schemaMetaManager.getEdgeLabel(this.graphSpace, + this.graph, name); + case INDEX_LABEL: + return (T) this.schemaMetaManager.getIndexLabel(this.graphSpace, + this.graph, name); + default: + throw new AssertionError(String.format( + "Invalid type '%s' for getSchema", type)); + } + } + + @SuppressWarnings("unchecked") + protected List getAllSchema(HugeType type) { + LOG.debug("SchemaTransaction getAllSchema {}", type.readableName()); + switch (type) { + case PROPERTY_KEY: + return (List) this.schemaMetaManager.getPropertyKeys(this.graphSpace, + this.graph); + case VERTEX_LABEL: + return (List) this.schemaMetaManager.getVertexLabels(this.graphSpace, + this.graph); + case EDGE_LABEL: + return (List) this.schemaMetaManager.getEdgeLabels(this.graphSpace, this.graph); + case INDEX_LABEL: + return (List) this.schemaMetaManager.getIndexLabels(this.graphSpace, this.graph); + default: + throw new AssertionError(String.format( + 
"Invalid type '%s' for getSchema", type)); + } + } + + protected void removeSchema(SchemaElement schema) { + LOG.debug("SchemaTransaction remove {} by id '{}'", + schema.type(), schema.id()); + // TODO: uncomment later - graph space + //String spaceGraph = this.graphParams() + // .graph().spaceGraphName(); + LockUtil.Locks locks = new LockUtil.Locks(graph); + try { + locks.lockWrites(LockUtil.hugeType2Group(schema.type()), + schema.id()); + switch (schema.type()) { + case PROPERTY_KEY: + this.schemaMetaManager.removePropertyKey(this.graphSpace, this.graph, + schema.id()); + break; + case VERTEX_LABEL: + this.schemaMetaManager.removeVertexLabel(this.graphSpace, this.graph, + schema.id()); + break; + case EDGE_LABEL: + this.schemaMetaManager.removeEdgeLabel(this.graphSpace, this.graph, + schema.id()); + break; + case INDEX_LABEL: + this.schemaMetaManager.removeIndexLabel(this.graphSpace, this.graph, + schema.id()); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for saveSchema", schema.type())); + } + } finally { + locks.unlock(); + } + } + + // olap 相关的方法 + public void createIndexLabelForOlapPk(PropertyKey propertyKey) { + WriteType writeType = propertyKey.writeType(); + if (writeType == WriteType.OLTP || + writeType == WriteType.OLAP_COMMON) { + return; + } + + String indexName = SchemaElement.OLAP + "_by_" + propertyKey.name(); + IndexLabel.Builder builder = this.graph().schema() + .indexLabel(indexName) + .onV(SchemaElement.OLAP) + .by(propertyKey.name()); + if (propertyKey.writeType() == WriteType.OLAP_SECONDARY) { + builder.secondary(); + } else { + assert propertyKey.writeType() == WriteType.OLAP_RANGE; + builder.range(); + } + builder.build(); + this.graph().addIndexLabel(VertexLabel.OLAP_VL, builder.build()); + } + + public Id removeOlapPk(PropertyKey propertyKey) { + LOG.debug("SchemaTransaction remove olap property key {} with id '{}'", + propertyKey.name(), propertyKey.id()); + SchemaJob job = new 
OlapPropertyKeyRemoveJob(); + return asyncRun(this.graph(), propertyKey, job); + } + + public void removeOlapPk(Id id) { + this.graphParams().loadGraphStore().removeOlapTable(id); + } + + public Id clearOlapPk(PropertyKey propertyKey) { + LOG.debug("SchemaTransaction clear olap property key {} with id '{}'", + propertyKey.name(), propertyKey.id()); + SchemaJob job = new OlapPropertyKeyClearJob(); + return asyncRun(this.graph(), propertyKey, job); + } + + public void clearOlapPk(Id id) { + this.graphParams().loadGraphStore().clearOlapTable(id); + } + + public Id createOlapPk(PropertyKey propertyKey) { + LOG.debug("SchemaTransaction create olap property key {} with id '{}'", + propertyKey.name(), propertyKey.id()); + SchemaJob job = new OlapPropertyKeyCreateJob(); + return asyncRun(this.graph(), propertyKey, job); + } + + // -- store 相关的方法,分为两类:1、olaptable相关 2、id生成策略 + // - 1、olaptable相关 + public void createOlapPk(Id id) { + this.graphParams().loadGraphStore().createOlapTable(id); + } + + // TODO: uncomment later - olap + //public boolean existOlapTable(Id id) { + // return this.graphParams().loadGraphStore().existOlapTable(id); + //} + + public void initAndRegisterOlapTables() { + for (PropertyKey pk : this.getPropertyKeys()) { + if (pk.olap()) { + this.graphParams().loadGraphStore().checkAndRegisterOlapTable(pk.id()); + } + } + } + + // - 2、id生成策略 + @Watched(prefix = "schema") + public Id getNextId(HugeType type) { + LOG.debug("SchemaTransaction get next id for {}", type); + return this.idCounter.nextId(type); + } + + @Watched(prefix = "schema") + public void setNextIdLowest(HugeType type, long lowest) { + LOG.debug("SchemaTransaction set next id to {} for {}", lowest, type); + this.idCounter.setCounterLowest(type, lowest); + } + + @Watched(prefix = "schema") + public Id getNextSystemId() { + LOG.debug("SchemaTransaction get next system id"); + Id id = this.idCounter.nextId(HugeType.SYS_SCHEMA); + return IdGenerator.of(-id.asLong()); + } + + @Watched(prefix = 
"schema") + public Id validOrGenerateId(HugeType type, Id id, String name) { + boolean forSystem = Graph.Hidden.isHidden(name); + if (id != null) { + this.checkIdAndUpdateNextId(type, id, name, forSystem); + } else { + if (forSystem) { + id = this.getNextSystemId(); + } else { + id = this.getNextId(type); + } + } + return id; + } + + private void checkIdAndUpdateNextId(HugeType type, Id id, + String name, boolean forSystem) { + if (forSystem) { + if (id.number() && id.asLong() < 0) { + return; + } + throw new IllegalStateException(String.format( + "Invalid system id '%s'", id)); + } + E.checkState(id.number() && id.asLong() > 0L, + "Schema id must be number and >0, but got '%s'", id); + GraphMode mode = this.graphMode(); + E.checkState(mode == GraphMode.RESTORING, + "Can't build schema with provided id '%s' " + + "when graph '%s' in mode '%s'", id, this.graph, mode); + this.setNextIdLowest(type, id.asLong()); + } + + // 功能型函数 + public void checkSchemaName(String name) { + String illegalReg = this.graphParams().configuration() + .get(CoreOptions.SCHEMA_ILLEGAL_NAME_REGEX); + E.checkNotNull(name, "name"); + E.checkArgument(!name.isEmpty(), "The name can't be empty."); + E.checkArgument(name.length() < 256, + "The length of name must less than 256 bytes."); + E.checkArgument(!name.matches(illegalReg), + "Illegal schema name '%s'", name); + + final char[] filters = {'#', '>', ':', '!'}; + for (char c : filters) { + E.checkArgument(name.indexOf(c) == -1, + "The name can't contain character '%s'.", c); + } + } + + @Override + public String graphName() { + return this.graph; + } + + protected HugeGraphParams graphParams() { + return this.graphParams; + } + + public GraphMode graphMode() { + return this.graphParams().mode(); + } + + // 获取字段的方法 + public HugeGraph graph() { + return this.graphParams.graph(); + } + + // 重建索引 + @Watched(prefix = "schema") + public Id rebuildIndex(SchemaElement schema) { + return this.rebuildIndex(schema, ImmutableSet.of()); + } + + 
@Watched(prefix = "schema") + public Id rebuildIndex(SchemaElement schema, Set dependencies) { + LOG.debug("SchemaTransaction rebuild index for {} with id '{}'", + schema.type(), schema.id()); + SchemaJob job = new IndexLabelRebuildJob(); + return asyncRun(this.graph(), schema, job, dependencies); + } + + /** + * 清除所有的schema信息 + */ + public void clear() { + this.schemaMetaManager.clearAllSchema(this.graphSpace, graph); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java index af2fb65f78..e05563b868 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/EdgeLabelRemoveJob.java @@ -21,6 +21,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.type.define.SchemaStatus; @@ -43,7 +44,7 @@ public Object execute() { private static void removeEdgeLabel(HugeGraphParams graph, Id id) { GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); EdgeLabel edgeLabel = schemaTx.getEdgeLabel(id); // If the edge label does not exist, return directly if (edgeLabel == null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java index 7e047a0632..0c5624af1a 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRebuildJob.java @@ -24,6 +24,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.SchemaStatus; @@ -83,7 +84,7 @@ private void rebuildIndex(SchemaElement schema) { } private void rebuildIndex(SchemaLabel label, Collection indexLabelIds) { - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); GraphTransaction graphTx = this.params().graphTransaction(); Consumer indexUpdater = (elem) -> { @@ -148,7 +149,7 @@ private void rebuildIndex(SchemaLabel label, Collection indexLabelIds) { } private void removeIndex(Collection indexLabelIds) { - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); GraphTransaction graphTx = this.params().graphTransaction(); for (Id id : indexLabelIds) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java index 113507c994..f5c52348cc 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/IndexLabelRemoveJob.java @@ -19,6 +19,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import 
org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.type.define.SchemaStatus; @@ -40,7 +41,7 @@ public Object execute() { protected static void removeIndexLabel(HugeGraphParams graph, Id id) { GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); IndexLabel indexLabel = schemaTx.getIndexLabel(id); // If the index label does not exist, return directly if (indexLabel == null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java index f28f0af276..af47ae00cf 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyClearJob.java @@ -19,6 +19,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.type.define.SchemaStatus; @@ -50,7 +51,7 @@ protected static void clearIndexLabel(HugeGraphParams graph, Id id) { return; } GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); IndexLabel indexLabel = schemaTx.getIndexLabel(olapIndexLabel); // If the index label does not exist, return directly if (indexLabel == null) { @@ -80,7 +81,7 @@ protected static void clearIndexLabel(HugeGraphParams graph, Id id) { } protected static Id findOlapIndexLabel(HugeGraphParams graph, Id olap) { - SchemaTransaction 
schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); for (IndexLabel indexLabel : schemaTx.getIndexLabels()) { if (indexLabel.indexFields().contains(olap)) { return indexLabel.id(); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java index 578f874734..db56515318 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyCreateJob.java @@ -17,6 +17,7 @@ package org.apache.hugegraph.job.schema; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; @@ -29,7 +30,7 @@ public String type() { @Override public Object execute() { - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); PropertyKey propertyKey = schemaTx.getPropertyKey(this.schemaId()); // Create olap index label schema schemaTx.createIndexLabelForOlapPk(propertyKey); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java index a9a5c4153e..81a56a0be6 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/OlapPropertyKeyRemoveJob.java @@ -18,6 +18,7 @@ package org.apache.hugegraph.job.schema; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import 
org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; @@ -42,7 +43,7 @@ public Object execute() { } // Remove olap property key - SchemaTransaction schemaTx = this.params().schemaTransaction(); + ISchemaTransaction schemaTx = this.params().schemaTransaction(); PropertyKey propertyKey = schemaTx.getPropertyKey(olap); removeSchema(schemaTx, propertyKey); return null; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java index 166f310dc3..28d47877bb 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/SchemaJob.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.job.SysJob; import org.apache.hugegraph.schema.SchemaElement; @@ -85,7 +86,7 @@ public static String formatTaskName(HugeType type, Id id, String name) { * @param tx The remove operation actual executer * @param schema the schema to be removed */ - protected static void removeSchema(SchemaTransaction tx, + protected static void removeSchema(ISchemaTransaction tx, SchemaElement schema) { try { Method method = SchemaTransaction.class diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java index 0f7fe942bd..4f7c926240 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java +++ 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/job/schema/VertexLabelRemoveJob.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.tx.GraphTransaction; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.VertexLabel; @@ -46,7 +47,7 @@ public Object execute() { private static void removeVertexLabel(HugeGraphParams graph, Id id) { GraphTransaction graphTx = graph.graphTransaction(); - SchemaTransaction schemaTx = graph.schemaTransaction(); + ISchemaTransaction schemaTx = graph.schemaTransaction(); VertexLabel vertexLabel = schemaTx.getVertexLabel(id); // If the vertex label does not exist, return directly if (vertexLabel == null) { diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java new file mode 100644 index 0000000000..8c9600e6b9 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/EtcdMetaDriver.java @@ -0,0 +1,322 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta; + +import java.io.File; +import java.net.URI; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.function.Consumer; + +import org.apache.commons.io.FileUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.meta.lock.EtcdDistributedLock; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.collection.CollectionFactory; + +import com.google.common.base.Strings; + +import io.etcd.jetcd.ByteSequence; +import io.etcd.jetcd.Client; +import io.etcd.jetcd.ClientBuilder; +import io.etcd.jetcd.KV; +import io.etcd.jetcd.KeyValue; +import io.etcd.jetcd.kv.GetResponse; +import io.etcd.jetcd.lease.LeaseKeepAliveResponse; +import io.etcd.jetcd.options.DeleteOption; +import io.etcd.jetcd.options.GetOption; +import io.etcd.jetcd.options.WatchOption; +import io.etcd.jetcd.watch.WatchEvent; +import io.etcd.jetcd.watch.WatchResponse; +import io.netty.handler.ssl.ApplicationProtocolConfig; +import io.netty.handler.ssl.ApplicationProtocolNames; +import io.netty.handler.ssl.SslContext; +import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.SslProvider; + +public class EtcdMetaDriver implements MetaDriver { + + private final Client client; + private final EtcdDistributedLock lock; + + public EtcdMetaDriver(String trustFile, String clientCertFile, + String clientKeyFile, Object... 
endpoints) { + ClientBuilder builder = this.etcdMetaDriverBuilder(endpoints); + + SslContext sslContext = openSslContext(trustFile, clientCertFile, + clientKeyFile); + this.client = builder.sslContext(sslContext).build(); + this.lock = EtcdDistributedLock.getInstance(this.client); + } + + public EtcdMetaDriver(Object... endpoints) { + ClientBuilder builder = this.etcdMetaDriverBuilder(endpoints); + this.client = builder.build(); + this.lock = EtcdDistributedLock.getInstance(this.client); + } + + private static ByteSequence toByteSequence(String content) { + return ByteSequence.from(content.getBytes()); + } + + private static boolean isEtcdPut(WatchEvent event) { + return event.getEventType() == WatchEvent.EventType.PUT; + } + + public static SslContext openSslContext(String trustFile, + String clientCertFile, + String clientKeyFile) { + SslContext ssl; + try { + File trustManagerFile = FileUtils.getFile(trustFile); + File keyCertChainFile = FileUtils.getFile(clientCertFile); + File KeyFile = FileUtils.getFile(clientKeyFile); + ApplicationProtocolConfig alpn = new ApplicationProtocolConfig( + ApplicationProtocolConfig.Protocol.ALPN, + ApplicationProtocolConfig.SelectorFailureBehavior.NO_ADVERTISE, + + ApplicationProtocolConfig.SelectedListenerFailureBehavior + .ACCEPT, + ApplicationProtocolNames.HTTP_2); + + ssl = SslContextBuilder.forClient() + .applicationProtocolConfig(alpn) + .sslProvider(SslProvider.OPENSSL) + .trustManager(trustManagerFile) + .keyManager(keyCertChainFile, KeyFile) + .build(); + } catch (Exception e) { + throw new HugeException("Failed to open ssl context", e); + } + return ssl; + } + + public ClientBuilder etcdMetaDriverBuilder(Object... 
endpoints) { + int length = endpoints.length; + ClientBuilder builder = null; + if (endpoints[0] instanceof List && endpoints.length == 1) { + builder = Client.builder() + .endpoints(((List) endpoints[0]) + .toArray(new String[0])); + } else if (endpoints[0] instanceof String) { + for (int i = 1; i < length; i++) { + E.checkArgument(endpoints[i] instanceof String, + "Inconsistent endpoint %s(%s) with %s(%s)", + endpoints[i], endpoints[i].getClass(), + endpoints[0], endpoints[0].getClass()); + } + builder = Client.builder().endpoints((String[]) endpoints); + } else if (endpoints[0] instanceof URI) { + for (int i = 1; i < length; i++) { + E.checkArgument(endpoints[i] instanceof String, + "Invalid endpoint %s(%s)", + endpoints[i], endpoints[i].getClass(), + endpoints[0], endpoints[0].getClass()); + } + builder = Client.builder().endpoints((URI[]) endpoints); + } else { + E.checkArgument(false, "Invalid endpoint %s(%s)", + endpoints[0], endpoints[0].getClass()); + } + return builder; + } + + @Override + public long keepAlive(String key, long leaseId) { + try { + LeaseKeepAliveResponse response = + this.client.getLeaseClient().keepAliveOnce(leaseId).get(); + return response.getID(); + } catch (InterruptedException | ExecutionException e) { + // keepAlive once Failed + return 0; + } + } + + @Override + public String get(String key) { + List keyValues; + KV kvClient = this.client.getKVClient(); + try { + keyValues = kvClient.get(toByteSequence(key)) + .get().getKvs(); + } catch (InterruptedException | ExecutionException e) { + throw new HugeException("Failed to get key '%s' from etcd", e, key); + } + + if (!keyValues.isEmpty()) { + return keyValues.get(0).getValue().toString(Charset.defaultCharset()); + } + + return null; + } + + @Override + public void put(String key, String value) { + KV kvClient = this.client.getKVClient(); + try { + kvClient.put(toByteSequence(key), toByteSequence(value)).get(); + } catch (InterruptedException | ExecutionException e) { + try { + 
kvClient.delete(toByteSequence(key)).get(); + } catch (Throwable t) { + throw new HugeException("Failed to put '%s:%s' to etcd", + e, key, value); + } + } + } + + @Override + public void delete(String key) { + KV kvClient = this.client.getKVClient(); + try { + kvClient.delete(toByteSequence(key)).get(); + } catch (InterruptedException | ExecutionException e) { + throw new HugeException( + "Failed to delete key '%s' from etcd", e, key); + } + } + + @Override + public void deleteWithPrefix(String prefix) { + KV kvClient = this.client.getKVClient(); + try { + DeleteOption option = DeleteOption.newBuilder() + .isPrefix(true) + .build(); + kvClient.delete(toByteSequence(prefix), option); + } catch (Throwable e) { + throw new HugeException( + "Failed to delete prefix '%s' from etcd", e, prefix); + } + } + + @Override + public Map scanWithPrefix(String prefix) { + GetOption getOption = GetOption.newBuilder() + .isPrefix(true) + .build(); + GetResponse response; + try { + response = this.client.getKVClient().get(toByteSequence(prefix), + getOption).get(); + } catch (InterruptedException | ExecutionException e) { + throw new HugeException("Failed to scan etcd with prefix '%s'", + e, prefix); + } + int size = (int) response.getCount(); + Map keyValues = CollectionFactory.newMap( + CollectionType.JCF, size); + for (KeyValue kv : response.getKvs()) { + String key = kv.getKey().toString(Charset.defaultCharset()); + String value = kv.getValue().isEmpty() ? 
"" : + kv.getValue().toString(Charset.defaultCharset()); + keyValues.put(key, value); + } + return keyValues; + } + + @Override + public List extractValuesFromResponse(T response) { + List values = new ArrayList<>(); + E.checkArgument(response instanceof WatchResponse, + "Invalid response type %s", response.getClass()); + for (WatchEvent event : ((WatchResponse) response).getEvents()) { + // Skip if not etcd PUT event + if (!isEtcdPut(event)) { + return null; + } + + String value = event.getKeyValue().getValue() + .toString(Charset.defaultCharset()); + values.add(value); + } + return values; + } + + @Override + public Map extractKVFromResponse(T response) { + E.checkArgument(response instanceof WatchResponse, + "Invalid response type %s", response.getClass()); + + Map resultMap = new HashMap<>(); + for (WatchEvent event : ((WatchResponse) response).getEvents()) { + // Skip if not etcd PUT event + if (!isEtcdPut(event)) { + continue; + } + + String key = event.getKeyValue().getKey().toString(Charset.defaultCharset()); + String value = event.getKeyValue().getValue() + .toString(Charset.defaultCharset()); + if (Strings.isNullOrEmpty(key)) { + continue; + } + resultMap.put(key, value); + } + return resultMap; + } + + @Override + public LockResult tryLock(String key, long ttl, long timeout) { + return this.lock.tryLock(key, ttl, timeout); + } + + @Override + public boolean isLocked(String key) { + try { + long size = this.client.getKVClient().get(toByteSequence(key)) + .get().getCount(); + + return size > 0; + } catch (InterruptedException | ExecutionException e) { + throw new HugeException("Failed to check is locked '%s'", e, key); + } + } + + @Override + public void unlock(String key, LockResult lockResult) { + this.lock.unLock(key, lockResult); + } + + @SuppressWarnings("unchecked") + @Override + public void listen(String key, Consumer consumer) { + + this.client.getWatchClient().watch(toByteSequence(key), + (Consumer) consumer); + } + + /** + * Listen etcd key with 
prefix + */ + @SuppressWarnings("unchecked") + @Override + public void listenPrefix(String prefix, Consumer consumer) { + ByteSequence sequence = toByteSequence(prefix); + WatchOption option = WatchOption.newBuilder().isPrefix(true).build(); + this.client.getWatchClient().watch(sequence, option, (Consumer) consumer); + + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java new file mode 100644 index 0000000000..2d0936498f --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaDriver.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta; + +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.hugegraph.meta.lock.LockResult; + +public interface MetaDriver { + + public void put(String key, String value); + + public String get(String key); + + public void delete(String key); + + public void deleteWithPrefix(String prefix); + + public Map scanWithPrefix(String prefix); + + public void listen(String key, Consumer consumer); + + public void listenPrefix(String prefix, Consumer consumer); + + public List extractValuesFromResponse(T response); + + /** + * Extract K-V pairs of response + * + * @param + * @param response + * @return + */ + public Map extractKVFromResponse(T response); + + public LockResult tryLock(String key, long ttl, long timeout); + + /** + * return if the key is Locked. + * + * @param key + * @return bool + */ + public boolean isLocked(String key); + + public void unlock(String key, LockResult lockResult); + + /** + * keepAlive of current lease + * + * @param key + * @param lease + * @return next leaseId + */ + public long keepAlive(String key, long lease); +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java new file mode 100644 index 0000000000..1662b114d4 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/MetaManager.java @@ -0,0 +1,1296 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.auth.HugeAccess; +import org.apache.hugegraph.auth.HugeBelong; +import org.apache.hugegraph.auth.HugeGroup; +import org.apache.hugegraph.auth.HugePermission; +import org.apache.hugegraph.auth.HugeRole; +import org.apache.hugegraph.auth.HugeTarget; +import org.apache.hugegraph.auth.HugeUser; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.meta.lock.LockResult; +import org.apache.hugegraph.meta.managers.AuthMetaManager; +import org.apache.hugegraph.meta.managers.ConfigMetaManager; +import org.apache.hugegraph.meta.managers.GraphMetaManager; +import org.apache.hugegraph.meta.managers.KafkaMetaManager; +import org.apache.hugegraph.meta.managers.LockMetaManager; +import org.apache.hugegraph.meta.managers.SchemaMetaManager; +import org.apache.hugegraph.meta.managers.SchemaTemplateMetaManager; +import org.apache.hugegraph.meta.managers.ServiceMetaManager; +import org.apache.hugegraph.meta.managers.SpaceMetaManager; +import org.apache.hugegraph.meta.managers.TaskMetaManager; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import 
org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.space.GraphSpace; +import org.apache.hugegraph.space.SchemaTemplate; +import org.apache.hugegraph.space.Service; +import org.apache.hugegraph.util.E; + +import com.google.common.collect.ImmutableMap; + +public class MetaManager { + + public static final String META_PATH_DELIMITER = "/"; + public static final String META_PATH_JOIN = "-"; + + public static final String META_PATH_HUGEGRAPH = "HUGEGRAPH"; + public static final String META_PATH_GRAPHSPACE = "GRAPHSPACE"; + public static final String META_PATH_GRAPHSPACE_LIST = "GRAPHSPACE_LIST"; + public static final String META_PATH_SERVICE = "SERVICE"; + public static final String META_PATH_SERVICE_CONF = "SERVICE_CONF"; + public static final String META_PATH_GRAPH_CONF = "GRAPH_CONF"; + public static final String META_PATH_CONF = "CONF"; + public static final String META_PATH_GRAPH = "GRAPH"; + public static final String META_PATH_SCHEMA = "SCHEMA"; + public static final String META_PATH_PROPERTY_KEY = "PROPERTY_KEY"; + public static final String META_PATH_VERTEX_LABEL = "VERTEX_LABEL"; + public static final String META_PATH_EDGE_LABEL = "EDGE_LABEL"; + public static final String META_PATH_INDEX_LABEL = "INDEX_LABEL"; + public static final String META_PATH_NAME = "NAME"; + public static final String META_PATH_ID = "ID"; + public static final String META_PATH_AUTH = "AUTH"; + public static final String META_PATH_USER = "USER"; + public static final String META_PATH_GROUP = "GROUP"; + public static final String META_PATH_ROLE = "ROLE"; + public static final String META_PATH_TARGET = "TARGET"; + public static final String META_PATH_BELONG = "BELONG"; + public static final String META_PATH_ACCESS = "ACCESS"; + public static final String META_PATH_K8S_BINDINGS = "BINDING"; + public static final String META_PATH_REST_PROPERTIES = "REST_PROPERTIES"; + public static final String META_PATH_GREMLIN_YAML = 
"GREMLIN_YAML"; + public static final String META_PATH_SCHEMA_TEMPLATE = "SCHEMA_TEMPLATE"; + public static final String META_PATH_TASK = "TASK"; + public static final String META_PATH_TASK_LOCK = "TASK_LOCK"; + public static final String META_PATH_AUTH_EVENT = "AUTH_EVENT"; + public static final String META_PATH_EVENT = "EVENT"; + public static final String META_PATH_ADD = "ADD"; + public static final String META_PATH_REMOVE = "REMOVE"; + public static final String META_PATH_UPDATE = "UPDATE"; + public static final String META_PATH_CLEAR = "CLEAR"; + public static final String META_PATH_DDS = "DDS_HOST"; + public static final String META_PATH_METRICS = "METRICS"; + public static final String META_PATH_KAFKA = "KAFKA"; + public static final String META_PATH_HOST = "BROKER_HOST"; + public static final String META_PATH_PORT = "BROKER_PORT"; + public static final String META_PATH_PARTITION_COUNT = "PARTITION_COUNT"; + public static final String META_PATH_DATA_SYNC_ROLE = "DATA_SYNC_ROLE"; + public static final String META_PATH_SLAVE_SERVER_HOST = "SLAVE_SERVER_HOST"; + public static final String META_PATH_SLAVE_SERVER_PORT = "SLAVE_SERVER_PORT"; + public static final String META_PATH_SYNC_BROKER = "SYNC_BROKER"; + public static final String META_PATH_SYNC_STORAGE = "SYNC_STORAGE"; + public static final String META_PATH_KAFKA_FILTER = "KAFKA-FILTER"; + public static final String META_PATH_WHITE_IP_LIST = "WHITE_IP_LIST"; + public static final String META_PATH_WHITE_IP_STATUS = "WHITE_IP_STATUS"; + public static final long LOCK_DEFAULT_LEASE = 30L; + public static final long LOCK_DEFAULT_TIMEOUT = 10L; + public static final int RANDOM_USER_ID = 100; + private static final String META_PATH_URLS = "URLS"; + private static final String META_PATH_PD_PEERS = "HSTORE_PD_PEERS"; + private static final MetaManager INSTANCE = new MetaManager(); + private MetaDriver metaDriver; + private String cluster; + private AuthMetaManager authMetaManager; + private GraphMetaManager 
graphMetaManager; + private SchemaMetaManager schemaMetaManager; + private ServiceMetaManager serviceMetaManager; + private SpaceMetaManager spaceMetaManager; + private TaskMetaManager taskMetaManager; + private ConfigMetaManager configMetaManager; + private KafkaMetaManager kafkaMetaManager; + private SchemaTemplateMetaManager schemaTemplateManager; + private LockMetaManager lockMetaManager; + + private MetaManager() { + } + + public static MetaManager instance() { + return INSTANCE; + } + + public synchronized boolean isReady() { + return null != this.metaDriver; + } + + public String cluster() { + return this.cluster; + } + + public synchronized void connect(String cluster, MetaDriverType type, + String trustFile, String clientCertFile, + String clientKeyFile, Object... args) { + E.checkArgument(cluster != null && !cluster.isEmpty(), + "The cluster can't be null or empty"); + if (this.metaDriver == null) { + this.cluster = cluster; + + switch (type) { + case ETCD: + this.metaDriver = trustFile == null || trustFile.isEmpty() ? 
+ new EtcdMetaDriver(args) : + new EtcdMetaDriver(trustFile, + clientCertFile, + clientKeyFile, args); + break; + case PD: + assert args.length > 0; + String pdPeer = String.join(",", (List) args[0]); + this.metaDriver = new PdMetaDriver(pdPeer); + break; + default: + throw new AssertionError(String.format( + "Invalid meta driver type: %s", type)); + } + } + this.initManagers(this.cluster); + } + + private void initManagers(String cluster) { + this.authMetaManager = new AuthMetaManager(this.metaDriver, cluster); + this.graphMetaManager = new GraphMetaManager(this.metaDriver, cluster); + this.schemaMetaManager = new SchemaMetaManager(this.metaDriver, cluster, null); + this.serviceMetaManager = new ServiceMetaManager(this.metaDriver, cluster); + this.spaceMetaManager = new SpaceMetaManager(this.metaDriver, cluster); + this.taskMetaManager = new TaskMetaManager(this.metaDriver, cluster); + this.configMetaManager = new ConfigMetaManager(this.metaDriver, cluster); + this.kafkaMetaManager = new KafkaMetaManager(this.metaDriver, cluster); + this.schemaTemplateManager = new SchemaTemplateMetaManager(this.metaDriver, cluster); + this.lockMetaManager = new LockMetaManager(this.metaDriver, cluster); + } + + public void listenGraphSpaceAdd(Consumer consumer) { + this.spaceMetaManager.listenGraphSpaceAdd(consumer); + } + + public void listenGraphSpaceRemove(Consumer consumer) { + this.spaceMetaManager.listenGraphSpaceRemove(consumer); + } + + public void listenGraphSpaceUpdate(Consumer consumer) { + this.spaceMetaManager.listenGraphSpaceUpdate(consumer); + } + + public void notifyGraphSpaceAdd(String graphSpace) { + this.spaceMetaManager.notifyGraphSpaceAdd(graphSpace); + } + + public void notifyGraphSpaceRemove(String graphSpace) { + this.spaceMetaManager.notifyGraphSpaceRemove(graphSpace); + } + + public void notifyGraphSpaceUpdate(String graphSpace) { + this.spaceMetaManager.notifyGraphSpaceUpdate(graphSpace); + } + + public void listenServiceAdd(Consumer consumer) { + 
this.serviceMetaManager.listenServiceAdd(consumer); + } + + public void listenServiceRemove(Consumer consumer) { + this.serviceMetaManager.listenServiceRemove(consumer); + } + + public void listenServiceUpdate(Consumer consumer) { + this.serviceMetaManager.listenServiceUpdate(consumer); + } + + public void listenGraphAdd(Consumer consumer) { + this.graphMetaManager.listenGraphAdd(consumer); + } + + public void listenGraphUpdate(Consumer consumer) { + this.graphMetaManager.listenGraphUpdate(consumer); + } + + public void listenGraphRemove(Consumer consumer) { + this.graphMetaManager.listenGraphRemove(consumer); + } + + public void listenGraphClear(Consumer consumer) { + this.graphMetaManager.listenGraphClear(consumer); + } + + public void listenSchemaCacheClear(Consumer consumer) { + this.graphMetaManager.listenSchemaCacheClear(consumer); + } + + public void listenGraphCacheClear(Consumer consumer) { + this.graphMetaManager.listenGraphCacheClear(consumer); + } + + /** + * 监听vertex label变化, graph vertex cache clear + * + * @param consumer + * @param + */ + public void listenGraphVertexCacheClear(Consumer consumer) { + this.graphMetaManager.listenGraphVertexCacheClear(consumer); + } + + /** + * 监听edge label变化, graph edge cache clear + * + * @param consumer + * @param + */ + public void listenGraphEdgeCacheClear(Consumer consumer) { + this.graphMetaManager.listenGraphEdgeCacheClear(consumer); + } + + public void listenRestPropertiesUpdate(String graphSpace, + String serviceId, + Consumer consumer) { + this.configMetaManager.listenRestPropertiesUpdate(graphSpace, + serviceId, + consumer); + } + + public void listenGremlinYamlUpdate(String graphSpace, + String serviceId, + Consumer consumer) { + this.configMetaManager.listenGremlinYamlUpdate(graphSpace, + serviceId, + consumer); + } + + public void listenAuthEvent(Consumer consumer) { + this.authMetaManager.listenAuthEvent(consumer); + } + + private void putAuthEvent(AuthEvent event) { + 
this.authMetaManager.putAuthEvent(event); + } + + public void listenKafkaConfig(Consumer consumer) { + this.kafkaMetaManager.listenKafkaConfig(consumer); + } + + public String kafkaGetRaw(String key) { + return this.kafkaMetaManager.getRaw(key); + } + + public void kafkaPutOrDeleteRaw(String key, String val) { + this.kafkaMetaManager.putOrDeleteRaw(key, val); + } + + public Map graphSpaceConfigs() { + return this.spaceMetaManager.graphSpaceConfigs(); + } + + public Map serviceConfigs(String graphSpace) { + return this.serviceMetaManager.serviceConfigs(graphSpace); + } + + public Map> graphConfigs(String graphSpace) { + return this.graphMetaManager.graphConfigs(graphSpace); + } + + public Set schemaTemplates(String graphSpace) { + return this.schemaTemplateManager.schemaTemplates(graphSpace); + } + + @SuppressWarnings("unchecked") + public SchemaTemplate schemaTemplate(String graphSpace, + String schemaTemplate) { + return this.schemaTemplateManager.schemaTemplate(graphSpace, + schemaTemplate); + } + + public void addSchemaTemplate(String graphSpace, SchemaTemplate template) { + this.schemaTemplateManager.addSchemaTemplate(graphSpace, template); + } + + public void updateSchemaTemplate(String graphSpace, + SchemaTemplate template) { + this.schemaTemplateManager.updateSchemaTemplate(graphSpace, template); + } + + public void removeSchemaTemplate(String graphSpace, String name) { + this.schemaTemplateManager.removeSchemaTemplate(graphSpace, name); + } + + public void clearSchemaTemplate(String graphSpace) { + this.schemaTemplateManager.clearSchemaTemplate(graphSpace); + } + + public String extractGraphSpaceFromKey(String key) { + String[] parts = key.split(META_PATH_DELIMITER); + if (parts.length < 4) { + return null; + } + if (parts[3].equals(META_PATH_CONF)) { + return parts.length < 5 ? 
null : parts[4]; + } + return parts[3]; + } + + public List extractGraphFromKey(String key) { + String[] parts = key.split(META_PATH_DELIMITER); + if (parts.length < 6) { + return Collections.EMPTY_LIST; + } + return Arrays.asList(parts[3], parts[5]); + } + + public List extractGraphSpacesFromResponse(T response) { + return this.metaDriver.extractValuesFromResponse(response); + } + + public List extractServicesFromResponse(T response) { + return this.metaDriver.extractValuesFromResponse(response); + } + + public List extractGraphsFromResponse(T response) { + return this.metaDriver.extractValuesFromResponse(response); + } + + public Map extractKVFromResponse(T response) { + return this.metaDriver.extractKVFromResponse(response); + } + + public GraphSpace getGraphSpaceConfig(String graphSpace) { + return this.spaceMetaManager.getGraphSpaceConfig(graphSpace); + } + + public String getServiceRawConfig(String graphSpace, String service) { + return this.serviceMetaManager.getServiceRawConfig(graphSpace, service); + } + + public Service parseServiceRawConfig(String serviceRawConf) { + return this.serviceMetaManager.parseServiceRawConfig(serviceRawConf); + } + + public Service getServiceConfig(String graphSpace, String service) { + return this.serviceMetaManager.getServiceConfig(graphSpace, service); + } + + public Map getGraphConfig(String graphSpace, String graph) { + return this.graphMetaManager.getGraphConfig(graphSpace, graph); + } + + public void addGraphConfig(String graphSpace, String graph, + Map configs) { + this.graphMetaManager.addGraphConfig(graphSpace, graph, configs); + } + + public void updateGraphConfig(String graphSpace, String graph, + Map configs) { + this.graphMetaManager.updateGraphConfig(graphSpace, graph, configs); + } + + public GraphSpace graphSpace(String name) { + return this.spaceMetaManager.graphSpace(name); + } + + public void addGraphSpaceConfig(String name, GraphSpace space) { + this.spaceMetaManager.addGraphSpaceConfig(name, space); + } + 
+ public void removeGraphSpaceConfig(String name) { + this.spaceMetaManager.removeGraphSpaceConfig(name); + } + + public void updateGraphSpaceConfig(String name, GraphSpace space) { + this.spaceMetaManager.updateGraphSpaceConfig(name, space); + } + + public void appendGraphSpaceList(String name) { + this.spaceMetaManager.appendGraphSpaceList(name); + } + + public void clearGraphSpaceList(String name) { + this.spaceMetaManager.clearGraphSpaceList(name); + } + + public void notifyServiceAdd(String graphSpace, String name) { + this.serviceMetaManager.notifyServiceAdd(graphSpace, name); + } + + public void notifyServiceRemove(String graphSpace, String name) { + this.serviceMetaManager.notifyServiceRemove(graphSpace, name); + } + + public void notifyServiceUpdate(String graphSpace, String name) { + this.serviceMetaManager.notifyServiceUpdate(graphSpace, name); + } + + public Service service(String graphSpace, String name) { + return this.serviceMetaManager.service(graphSpace, name); + } + + public void addServiceConfig(String graphSpace, Service service) { + this.serviceMetaManager.addServiceConfig(graphSpace, service); + } + + public void removeServiceConfig(String graphSpace, String service) { + this.serviceMetaManager.removeServiceConfig(graphSpace, service); + } + + public void updateServiceConfig(String graphSpace, Service service) { + this.addServiceConfig(graphSpace, service); + } + + public void removeGraphConfig(String graphSpace, String graph) { + this.graphMetaManager.removeGraphConfig(graphSpace, graph); + } + + public void notifyGraphAdd(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphAdd(graphSpace, graph); + } + + public void notifyGraphRemove(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphRemove(graphSpace, graph); + } + + public void notifyGraphUpdate(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphUpdate(graphSpace, graph); + } + + public void notifyGraphClear(String graphSpace, String 
graph) { + this.graphMetaManager.notifyGraphClear(graphSpace, graph); + } + + public void notifySchemaCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifySchemaCacheClear(graphSpace, graph); + } + + public void notifyGraphCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphCacheClear(graphSpace, graph); + } + + /** + * 通知 需要进行 graph vertex cache clear + * + * @param graphSpace + * @param graph + */ + public void notifyGraphVertexCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphVertexCacheClear(graphSpace, graph); + } + + /** + * 通知 需要进行 graph edge cache clear + * + * @param graphSpace + * @param graph + */ + public void notifyGraphEdgeCacheClear(String graphSpace, String graph) { + this.graphMetaManager.notifyGraphEdgeCacheClear(graphSpace, graph); + } + + public LockResult tryLock(String key) { + return this.lockMetaManager.tryLock(key); + } + + public void unlock(LockResult lockResult, String... keys) { + this.lockMetaManager.unlock(lockResult, keys); + } + + public void unlock(String key, LockResult lockResult) { + this.lockMetaManager.unlock(key, lockResult); + } + + public String belongId(String userName, String roleName) { + return this.authMetaManager.belongId(userName, roleName, HugeBelong.UR); + } + + public String belongId(String source, String target, String link) { + return this.authMetaManager.belongId(source, target, link); + } + + public String accessId(String roleName, String targetName, + HugePermission permission) { + return this.authMetaManager.accessId(roleName, targetName, permission); + } + + private String graphSpaceBindingsServer(String name, BindingType type) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/CONF/{graphspace} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + name, + META_PATH_K8S_BINDINGS, + type.name(), + META_PATH_URLS); + } + + /** + * Get DDS (eureka) host, format should be "ip:port", with no 
/ + * + * @return + */ + private String ddsHostKey() { + // HUGEGRAPH/{cluster}/DDS_HOST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_DDS); + } + + private String hugeClusterRoleKey() { + // HUGEGRAPH/{clusterRole}/KAFKA/DATA_SYNC_ROLE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_DATA_SYNC_ROLE); + } + + private String kafkaHostKey() { + // HUGEGRAPH/{cluster}/KAFKA/BROKER_HOST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_HOST); + } + + private String kafkaPortKey() { + // HUGEGRAPH/{cluster}/KAFKA/BROKER_PORT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_PORT); + } + + private String kafkaPartitionCountKey() { + // HUGEGRAPH/{cluster}/KAFKA/PARTITION_COUNT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_PARTITION_COUNT); + } + + private String kafkaSlaveHostKey() { + // HUGEGRAPH/{cluster}/KAFKA/SLAVE_SERVER_HOST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SLAVE_SERVER_HOST); + } + + private String kafkaSlavePortKey() { + // HUGEGRAPH/{cluster}/KAFKA/SLAVE_SERVER_PORT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SLAVE_SERVER_PORT); + } + + public String kafkaSyncBrokerKey() { + // HUGEGRAPH/{cluster}/KAFKA/SYNC_BROKER + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SYNC_BROKER); + } + + public String kafkaSyncStorageKey() { + // HUGEGRAPH/{cluster}/KAFKA/SYNC_STORAGE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA, + META_PATH_SYNC_STORAGE); + } + + public String kafkaFilterGraphspaceKey() { + // 
HUGEGRAPH/{cluster}/KAFKA-FILTER/GRAPHSPACE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA_FILTER, + META_PATH_GRAPHSPACE); + } + + public String kafkaFilterGraphKey() { + // HUGEGRAPH/{cluster}/KAFKA-FILTER/FILTER/GRAPH + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA_FILTER, + META_PATH_GRAPH); + } + + private String whiteIpListKey() { + // HUGEGRAPH/{cluster}/WHITE_IP_LIST + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_WHITE_IP_LIST); + } + + private String whiteIpStatusKey() { + // HUGEGRAPH/{cluster}/WHITE_IP_STATUS + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_WHITE_IP_STATUS); + } + + private String hstorePDPeersKey() { + // HUGEGRAPH/{cluster}/META_PATH_PD_PEERS + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_PD_PEERS); + } + + public Id addPropertyKey(String graphSpace, String graph, + PropertyKey propertyKey) { + this.schemaMetaManager.addPropertyKey(graphSpace, graph, propertyKey); + return IdGenerator.ZERO; + } + + public void updatePropertyKey(String graphSpace, String graph, + PropertyKey pkey) { + this.schemaMetaManager.updatePropertyKey(graphSpace, graph, pkey); + } + + public PropertyKey getPropertyKey(String graphSpace, String graph, + Id propertyKey) { + return this.schemaMetaManager.getPropertyKey(graphSpace, graph, + propertyKey); + } + + public PropertyKey getPropertyKey(String graphSpace, String graph, + String propertyKey) { + return this.schemaMetaManager.getPropertyKey(graphSpace, graph, + propertyKey); + } + + public List getPropertyKeys(String graphSpace, String graph) { + return this.schemaMetaManager.getPropertyKeys(graphSpace, graph); + } + + public Id removePropertyKey(String graphSpace, String graph, + Id propertyKey) { + return this.schemaMetaManager.removePropertyKey(graphSpace, graph, + 
propertyKey); + } + + public void addVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + this.schemaMetaManager.addVertexLabel(graphSpace, graph, vertexLabel); + } + + public void updateVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + this.schemaMetaManager.updateVertexLabel(graphSpace, graph, + vertexLabel); + } + + public VertexLabel getVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + return this.schemaMetaManager.getVertexLabel(graphSpace, graph, + vertexLabel); + } + + public VertexLabel getVertexLabel(String graphSpace, String graph, + String vertexLabel) { + return this.schemaMetaManager.getVertexLabel(graphSpace, graph, + vertexLabel); + } + + public List getVertexLabels(String graphSpace, String graph) { + return this.schemaMetaManager.getVertexLabels(graphSpace, graph); + } + + public Id removeVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + return this.schemaMetaManager.removeVertexLabel(graphSpace, graph, + vertexLabel); + } + + public void addEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + this.schemaMetaManager.addEdgeLabel(graphSpace, graph, edgeLabel); + } + + public void updateEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + this.schemaMetaManager.updateEdgeLabel(graphSpace, graph, edgeLabel); + } + + + public EdgeLabel getEdgeLabel(String graphSpace, String graph, + Id edgeLabel) { + return this.schemaMetaManager.getEdgeLabel(graphSpace, graph, + edgeLabel); + } + + public EdgeLabel getEdgeLabel(String graphSpace, String graph, + String edgeLabel) { + return this.schemaMetaManager.getEdgeLabel(graphSpace, graph, + edgeLabel); + } + + public List getEdgeLabels(String graphSpace, String graph) { + return this.schemaMetaManager.getEdgeLabels(graphSpace, graph); + } + + public Id removeEdgeLabel(String graphSpace, String graph, Id edgeLabel) { + return this.schemaMetaManager.removeEdgeLabel(graphSpace, graph, + edgeLabel); + 
} + + public void addIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + this.schemaMetaManager.addIndexLabel(graphSpace, graph, indexLabel); + } + + public void updateIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + this.schemaMetaManager.updateIndexLabel(graphSpace, graph, indexLabel); + } + + public IndexLabel getIndexLabel(String graphSpace, String graph, + Id indexLabel) { + return this.schemaMetaManager.getIndexLabel(graphSpace, graph, + indexLabel); + } + + public IndexLabel getIndexLabel(String graphSpace, String graph, + String indexLabel) { + return this.schemaMetaManager.getIndexLabel(graphSpace, graph, + indexLabel); + } + + public List getIndexLabels(String graphSpace, String graph) { + return this.schemaMetaManager.getIndexLabels(graphSpace, graph); + } + + public Id removeIndexLabel(String graphSpace, String graph, Id indexLabel) { + return this.schemaMetaManager.removeIndexLabel(graphSpace, graph, + indexLabel); + } + + public void createUser(HugeUser user) throws IOException { + this.authMetaManager.createUser(user); + } + + public HugeUser updateUser(HugeUser user) throws IOException { + return this.authMetaManager.updateUser(user); + } + + public HugeUser deleteUser(Id id) throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteUser(id); + } + + public HugeUser findUser(String name) + throws IOException, ClassNotFoundException { + return this.authMetaManager.findUser(name); + } + + public List listUsers(List ids) throws IOException, + ClassNotFoundException { + return this.authMetaManager.listUsers(ids); + } + + public List listAllUsers(long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllUsers(limit); + } + + public Id createGroup(HugeGroup group) throws IOException { + return this.authMetaManager.createGroup(group); + } + + public HugeGroup updateGroup(HugeGroup group) throws IOException { + return 
this.authMetaManager.updateGroup(group); + } + + public HugeGroup deleteGroup(Id id) throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteGroup(id); + } + + public HugeGroup findGroup(String name) throws IOException, + ClassNotFoundException { + return this.authMetaManager.findGroup(name); + } + + public List listGroups(long limit) throws IOException, + ClassNotFoundException { + return this.authMetaManager.listGroups(limit); + } + + public Id createRole(String graphSpace, HugeRole role) + throws IOException { + return this.authMetaManager.createRole(graphSpace, role); + } + + public HugeRole updateRole(String graphSpace, HugeRole role) + throws IOException { + return this.authMetaManager.updateRole(graphSpace, role); + } + + public HugeRole deleteRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteRole(graphSpace, id); + } + + public HugeRole findRole(String graphSpace, Id id) { + return this.authMetaManager.findRole(graphSpace, id); + } + + public HugeRole getRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.getRole(graphSpace, id); + } + + public List listRoles(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listRoles(graphSpace, ids); + } + + public List listAllRoles(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllRoles(graphSpace, limit); + } + + public Id createTarget(String graphSpace, HugeTarget target) + throws IOException { + return this.authMetaManager.createTarget(graphSpace, target); + } + + public HugeTarget updateTarget(String graphSpace, HugeTarget target) + throws IOException { + return this.authMetaManager.updateTarget(graphSpace, target); + } + + public HugeTarget deleteTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return 
this.authMetaManager.deleteTarget(graphSpace, id); + } + + public HugeTarget findTarget(String graphSpace, Id id) { + return this.authMetaManager.findTarget(graphSpace, id); + } + + public HugeTarget getTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.getTarget(graphSpace, id); + } + + public List listTargets(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listTargets(graphSpace, ids); + } + + public List listAllTargets(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllTargets(graphSpace, limit); + } + + public Id createBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + return this.authMetaManager.createBelong(graphSpace, belong); + } + + public HugeBelong updateBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + return this.authMetaManager.updateBelong(graphSpace, belong); + } + + public HugeBelong deleteBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.deleteBelong(graphSpace, id); + } + + public HugeBelong getBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.getBelong(graphSpace, id); + } + + public boolean existBelong(String graphSpace, Id id) { + return this.authMetaManager.existBelong(graphSpace, id); + } + + public List listBelong(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listBelong(graphSpace, ids); + } + + public List listAllBelong(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllBelong(graphSpace, limit); + } + + public List listBelongBySource(String graphSpace, + Id user, String link, long limit) + throws IOException, + 
ClassNotFoundException { + return this.authMetaManager.listBelongBySource(graphSpace, user, + link, limit); + } + + public List listBelongByTarget(String graphSpace, + Id role, String link, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listBelongByTarget(graphSpace, role, + link, limit); + } + + public Id createAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + return this.authMetaManager.createAccess(graphSpace, access); + } + + public HugeAccess updateAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + return this.authMetaManager.updateAccess(graphSpace, access); + } + + public HugeAccess deleteAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + return this.authMetaManager.deleteAccess(graphSpace, id); + } + + public HugeAccess findAccess(String graphSpace, Id id) { + return this.authMetaManager.findAccess(graphSpace, id); + } + + public HugeAccess getAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + return this.authMetaManager.getAccess(graphSpace, id); + } + + public List listAccess(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAccess(graphSpace, ids); + } + + public List listAllAccess(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAllAccess(graphSpace, limit); + } + + public List listAccessByRole(String graphSpace, + Id role, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAccessByRole(graphSpace, role, limit); + } + + public String targetFromAccess(String accessKey) { + return this.authMetaManager.targetFromAccess(accessKey); + } + + public void clearGraphAuth(String graphSpace) { + this.authMetaManager.clearGraphAuth(graphSpace); + } + + public List listAccessByTarget(String graphSpace, + Id 
target, long limit) + throws IOException, + ClassNotFoundException { + return this.authMetaManager.listAccessByTarget(graphSpace, target, + limit); + } + + public List listGraphSpace() { + return this.spaceMetaManager.listGraphSpace(); + } + + public void initDefaultGraphSpace() { + String defaultGraphSpace = "DEFAULT"; + this.appendGraphSpaceList(defaultGraphSpace); + } + + public Map restProperties(String graphSpace, + String serviceId) { + return this.configMetaManager.restProperties(graphSpace, serviceId); + } + + public Map restProperties(String graphSpace, + String serviceId, + Map properties) { + return this.configMetaManager.restProperties(graphSpace, serviceId, + properties); + } + + public Map deleteRestProperties(String graphSpace, + String serviceId, + String key) { + return this.configMetaManager.deleteRestProperties(graphSpace, + serviceId, key); + } + + public Map clearRestProperties(String graphSpace, + String serviceId) { + return this.configMetaManager.clearRestProperties(graphSpace, + serviceId); + } + + public LockResult tryLockTask(String graphSpace, String graphName, + String taskId) { + return this.taskMetaManager.tryLockTask(graphSpace, graphName, taskId); + } + + public boolean isLockedTask(String graphSpace, String graphName, + String taskId) { + return this.taskMetaManager.isLockedTask(graphSpace, graphName, taskId); + } + + public void unlockTask(String graphSpace, String graphName, + String taskId, LockResult lockResult) { + this.taskMetaManager.unlockTask(graphSpace, graphName, taskId, lockResult); + } + + public String gremlinYaml(String graphSpace, String serviceId) { + return this.configMetaManager.gremlinYaml(graphSpace, serviceId); + } + + public String gremlinYaml(String graphSpace, String serviceId, + String yaml) { + return this.configMetaManager.gremlinYaml(graphSpace, serviceId, yaml); + } + + public String hstorePDPeers() { + return this.metaDriver.get(hstorePDPeersKey()); + } + + public void listenAll(Consumer consumer) { + 
this.metaDriver.listenPrefix(MetaManager.META_PATH_HUGEGRAPH, consumer); + } + + public SchemaMetaManager schemaMetaManager() { + return this.schemaMetaManager; + } + + public MetaDriver metaDriver() { + return this.metaDriver; + } + + public String getDDSHost() { + String key = this.ddsHostKey(); + String host = this.metaDriver.get(key); + return host; + } + + public String getHugeGraphClusterRole() { + String key = this.hugeClusterRoleKey(); + String role = this.metaDriver.get(key); + return role; + } + + public String getKafkaBrokerHost() { + String key = this.kafkaHostKey(); + return this.metaDriver.get(key); + } + + public String getKafkaBrokerPort() { + String key = this.kafkaPortKey(); + return this.metaDriver.get(key); + } + + public Integer getPartitionCount() { + String key = this.kafkaPartitionCountKey(); + String result = this.metaDriver.get(key); + try { + Integer count = Integer.parseInt(Optional.ofNullable(result) + .orElse("0")); + return count < 1 ? 1 : count; + } catch (Exception e) { + return 1; + } + } + + public String getKafkaSlaveServerHost() { + String key = this.kafkaSlaveHostKey(); + return this.metaDriver.get(key); + } + + public Integer getKafkaSlaveServerPort() { + String key = this.kafkaSlavePortKey(); + String portStr = this.metaDriver.get(key); + int port = Integer.parseInt(portStr); + return port; + } + + public List getKafkaFilteredGraphspace() { + String key = this.kafkaFilterGraphspaceKey(); + + String raw = this.metaDriver.get(key); + if (StringUtils.isEmpty(raw)) { + return Collections.EMPTY_LIST; + } + String[] parts = raw.split(","); + return Arrays.asList(parts); + } + + public List getKafkaFilteredGraph() { + String key = this.kafkaFilterGraphKey(); + + String raw = this.metaDriver.get(key); + if (StringUtils.isEmpty(raw)) { + return Collections.EMPTY_LIST; + } + String[] parts = raw.split(","); + return Arrays.asList(parts); + } + + public void updateKafkaFilteredGraphspace(List graphSpaces) { + String key = 
this.kafkaFilterGraphspaceKey(); + String val = String.join(",", graphSpaces); + this.metaDriver.put(key, val); + + } + + public void updateKafkaFilteredGraph(List graphs) { + String key = this.kafkaFilterGraphKey(); + String val = String.join(",", graphs); + this.metaDriver.put(key, val); + } + + public List getWhiteIpList() { + String key = this.whiteIpListKey(); + + String raw = this.metaDriver.get(key); + if (StringUtils.isEmpty(raw)) { + return new ArrayList<>(); + } + String[] parts = raw.split(","); + return new ArrayList<>(Arrays.asList(parts)); + } + + public void setWhiteIpList(List whiteIpList) { + String key = this.whiteIpListKey(); + + String val = String.join(",", whiteIpList); + this.metaDriver.put(key, val); + } + + public String getCompStatus(String statuskey) { + String raw = this.metaDriver.get(statuskey); + if (StringUtils.isEmpty(raw)) { + return ""; + } + return raw; + } + + public boolean getWhiteIpStatus() { + String key = this.whiteIpStatusKey(); + String raw = this.metaDriver.get(key); + return ("true".equals(raw)); + } + + public void setWhiteIpStatus(boolean status) { + String key = this.whiteIpStatusKey(); + this.metaDriver.put(key, ((Boolean) status).toString()); + } + + public enum MetaDriverType { + ETCD, + PD + } + + public enum BindingType { + OLTP, + OLAP, + STORAGE + } + + public static class AuthEvent { + private String op; // ALLOW: CREATE | DELETE | UPDATE + private String type; // ALLOW: USER | GROUP | TARGET | ACCESS | BELONG + private String id; + + public AuthEvent(String op, String type, String id) { + this.op = op; + this.type = type; + this.id = id; + } + + public AuthEvent(Map properties) { + this.op = properties.get("op").toString(); + this.type = properties.get("type").toString(); + this.id = properties.get("id").toString(); + } + + public String op() { + return this.op; + } + + public void op(String op) { + this.op = op; + } + + public String type() { + return this.type; + } + + public void type(String type) { + 
this.type = type; + } + + public String id() { + return this.id; + } + + public void id(String id) { + this.id = id; + } + + public Map asMap() { + return ImmutableMap.of("op", this.op, + "type", this.type, + "id", this.id); + } + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java new file mode 100644 index 0000000000..f7da14196c --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/PdMetaDriver.java @@ -0,0 +1,212 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
package org.apache.hugegraph.meta;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

import org.apache.hugegraph.HugeException;
import org.apache.hugegraph.meta.lock.LockResult;
import org.apache.hugegraph.meta.lock.PdDistributedLock;
import org.apache.hugegraph.pd.client.KvClient;
import org.apache.hugegraph.pd.client.PDClient;
import org.apache.hugegraph.pd.client.PDConfig;
import org.apache.hugegraph.pd.common.PDException;
import org.apache.hugegraph.pd.grpc.kv.KResponse;
import org.apache.hugegraph.pd.grpc.kv.LockResponse;
import org.apache.hugegraph.pd.grpc.kv.ScanPrefixResponse;
import org.apache.hugegraph.pd.grpc.kv.TTLResponse;
import org.apache.hugegraph.pd.grpc.kv.WatchEvent;
import org.apache.hugegraph.pd.grpc.kv.WatchResponse;
import org.apache.hugegraph.pd.grpc.kv.WatchType;

import com.google.common.base.Strings;

/**
 * MetaDriver implementation backed by HugeGraph-PD's KV service.
 * Every PDException from the underlying client is wrapped into HugeException
 * with the failing key/prefix in the message and the cause preserved.
 */
public class PdMetaDriver implements MetaDriver {

    private final KvClient<WatchResponse> client;
    private final PDClient pdClient;
    private final PdDistributedLock lock;

    public PdMetaDriver(String pdPeer) {
        PDConfig pdConfig = PDConfig.of(pdPeer);
        this.client = new KvClient<>(pdConfig);
        this.pdClient = PDClient.create(pdConfig);
        this.lock = new PdDistributedLock(this.client);
    }

    public PDClient pdClient() {
        return this.pdClient;
    }

    @Override
    public void put(String key, String value) {
        try {
            this.client.put(key, value);
        } catch (PDException e) {
            throw new HugeException("Failed to put '%s:%s' to pd", e, key, value);
        }
    }

    @Override
    public String get(String key) {
        try {
            KResponse response = this.client.get(key);
            return response.getValue();
        } catch (PDException e) {
            throw new HugeException("Failed to get '%s' from pd", e, key);
        }
    }

    @Override
    public void delete(String key) {
        try {
            this.client.delete(key);
        } catch (PDException e) {
            throw new HugeException("Failed to delete '%s' from pd", e, key);
        }
    }

    @Override
    public void deleteWithPrefix(String prefix) {
        try {
            this.client.deletePrefix(prefix);
        } catch (PDException e) {
            throw new HugeException("Failed to deleteWithPrefix '%s' from pd", e, prefix);
        }
    }

    @Override
    public Map<String, String> scanWithPrefix(String prefix) {
        try {
            ScanPrefixResponse response = this.client.scanPrefix(prefix);
            return response.getKvsMap();
        } catch (PDException e) {
            throw new HugeException("Failed to scanWithPrefix '%s' from pd", e, prefix);
        }
    }

    @Override
    public <T> void listen(String key, Consumer<T> consumer) {
        try {
            this.client.listen(key, (Consumer) consumer);
        } catch (PDException e) {
            throw new HugeException("Failed to listen '%s' to pd", e, key);
        }
    }

    @Override
    public <T> void listenPrefix(String prefix, Consumer<T> consumer) {
        try {
            this.client.listenPrefix(prefix, (Consumer) consumer);
        } catch (PDException e) {
            throw new HugeException("Failed to listenPrefix '%s' to pd", e, prefix);
        }
    }

    /**
     * Collect the values of all PUT events in a watch response.
     * Fix: non-PUT events are now skipped (as the original comment intended
     * and as {@link #extractKVFromResponse} already does) instead of
     * aborting the whole batch with null.
     */
    @Override
    public <T> List<String> extractValuesFromResponse(T response) {
        List<String> values = new ArrayList<>();
        WatchResponse res = (WatchResponse) response;
        for (WatchEvent event : res.getEventsList()) {
            // Skip if not PUT event
            if (!event.getType().equals(WatchType.Put)) {
                continue;
            }
            values.add(event.getCurrent().getValue());
        }
        return values;
    }

    /**
     * Collect the key/value pairs of all PUT events in a watch response,
     * skipping events with an empty key.
     */
    @Override
    public <T> Map<String, String> extractKVFromResponse(T response) {
        Map<String, String> resultMap = new HashMap<>();
        WatchResponse res = (WatchResponse) response;
        for (WatchEvent event : res.getEventsList()) {
            // Skip if not PUT event
            if (!event.getType().equals(WatchType.Put)) {
                continue;
            }
            String key = event.getCurrent().getKey();
            String value = event.getCurrent().getValue();
            if (Strings.isNullOrEmpty(key)) {
                continue;
            }
            resultMap.put(key, value);
        }
        return resultMap;
    }

    /**
     * NOTE(review): the timeout parameter is ignored — PD-side locking only
     * honors the ttl. Confirm whether a bounded wait is required here.
     */
    @Override
    public LockResult tryLock(String key, long ttl, long timeout) {
        return this.lock.lock(key, ttl);
    }

    @Override
    public boolean isLocked(String key) {
        LockResponse locked;
        try {
            locked = this.client.isLocked(key);
        } catch (PDException e) {
            // Fix: preserve the PDException as the cause
            throw new HugeException("Failed to get isLocked '%s' from pd", e, key);
        }
        return locked.getSucceed();
    }

    @Override
    public void unlock(String key, LockResult lockResult) {
        this.lock.unLock(key, lockResult);
    }

    /**
     * Keep the lock lease for {@code key} alive; returns the client id
     * owning the lease. The lease argument is unused by the PD client.
     */
    @Override
    public long keepAlive(String key, long lease) {
        try {
            LockResponse lockResponse = this.client.keepAlive(key);
            if (!lockResponse.getSucceed()) {
                throw new HugeException("Failed to keepAlive '%s' to pd", key);
            }
            return lockResponse.getClientId();
        } catch (PDException e) {
            throw new HugeException("Failed to keepAlive '%s' to pd", e, key);
        }
    }

    public boolean keepTTLAlive(String key) {
        try {
            TTLResponse response = this.client.keepTTLAlive(key);
            return response.getSucceed();
        } catch (PDException e) {
            throw new HugeException("Failed to keepTTLAlive '%s' to pd", e, key);
        }
    }

    public boolean putTTL(String key, String value, long ttl) {
        try {
            TTLResponse response = this.client.putTTL(key, value, ttl);
            return response.getSucceed();
        } catch (PDException e) {
            // Fix: message said "keepTTLAlive" (copy-paste) — report putTTL
            throw new HugeException("Failed to putTTL '%s' to pd", e, key);
        }
    }
}
package org.apache.hugegraph.meta.lock;

import java.nio.charset.Charset;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.apache.hugegraph.util.Log;
import org.slf4j.Logger;

import io.etcd.jetcd.ByteSequence;
import io.etcd.jetcd.Client;
import io.etcd.jetcd.KV;
import io.etcd.jetcd.Lease;
import io.etcd.jetcd.Lock;

/**
 * Process-wide singleton distributed lock built on etcd leases and the etcd
 * Lock API. A background scheduler refreshes the lease while the lock is
 * held; callers must invoke unLock() with the returned LockResult.
 */
public class EtcdDistributedLock {

    protected static final Logger LOG = Log.logger(EtcdDistributedLock.class);

    private static final long UNLIMITED_TIMEOUT = -1L;
    private static final int POOL_SIZE = 8;
    private static final Object MUTEX = new Object();
    private static volatile EtcdDistributedLock lockProvider = null;

    private final KV kvClient;
    private final Lock lockClient;
    private final Lease leaseClient;

    // Shared scheduler for lease keep-alive heartbeats (daemon threads)
    private final ScheduledExecutorService service =
            new ScheduledThreadPoolExecutor(POOL_SIZE, r -> {
                Thread t = new Thread(r, "keepalive");
                t.setDaemon(true);
                return t;
            });

    private EtcdDistributedLock(Client client) {
        this.kvClient = client.getKVClient();
        this.lockClient = client.getLockClient();
        this.leaseClient = client.getLeaseClient();
    }

    public static EtcdDistributedLock getInstance(Client client) {
        // Double-checked locking: avoid synchronizing on every call
        if (lockProvider == null) {
            synchronized (MUTEX) {
                if (lockProvider == null) {
                    lockProvider = new EtcdDistributedLock(client);
                }
            }
        }
        return lockProvider;
    }

    private static ByteSequence toByteSequence(String content) {
        return ByteSequence.from(content, Charset.defaultCharset());
    }

    /**
     * Try to acquire the lock, waiting up to {@code timeout} seconds
     * (UNLIMITED_TIMEOUT waits indefinitely). On any failure the lease is
     * revoked and a LockResult with lockSuccess == false is returned.
     *
     * @param lockName etcd key to lock on
     * @param ttl      lease ttl in seconds; refreshed every ttl * 4/5 seconds
     * @param timeout  max seconds to wait for the lock, or UNLIMITED_TIMEOUT
     */
    public LockResult tryLock(String lockName, long ttl, long timeout) {
        LockResult lockResult = new LockResult();
        lockResult.lockSuccess(false);
        lockResult.setService(this.service);

        long leaseId;
        try {
            leaseId = this.leaseClient.grant(ttl).get().getID();
        } catch (InterruptedException | ExecutionException e) {
            // Fix: use %s placeholders — String.format ignores SLF4J-style {}
            LOG.warn(String.format("Thread %s failed to create lease for %s " +
                                   "with ttl %s",
                                   Thread.currentThread().getName(),
                                   lockName, ttl),
                     e);
            return lockResult;
        }

        lockResult.setLeaseId(leaseId);

        // Refresh the lease at 4/5 of its ttl so it never expires while held
        long period = ttl - ttl / 5;
        this.service.scheduleAtFixedRate(
                new KeepAliveTask(this.leaseClient, leaseId),
                period, period, TimeUnit.SECONDS);

        try {
            if (timeout == UNLIMITED_TIMEOUT) {
                this.lockClient.lock(toByteSequence(lockName), leaseId).get();
            } else {
                // Fix: honor the caller's timeout (was hard-coded to 1s)
                this.lockClient.lock(toByteSequence(lockName), leaseId)
                               .get(timeout, TimeUnit.SECONDS);
            }
        } catch (InterruptedException | ExecutionException e) {
            LOG.warn(String.format("Thread %s failed to lock %s",
                                   Thread.currentThread().getName(), lockName),
                     e);
            // NOTE(review): shutting down the shared scheduler also stops
            // keep-alive for every other held lock — confirm this is intended
            this.service.shutdown();
            this.revokeLease(leaseId);
            return lockResult;
        } catch (TimeoutException e) {
            // Lock acquisition timed out
            LOG.warn("Thread {} timeout to lock {}",
                     Thread.currentThread().getName(), lockName);
            this.service.shutdown();
            this.revokeLease(leaseId);
            return lockResult;
        }

        lockResult.lockSuccess(true);
        return lockResult;
    }

    /**
     * Acquire the lock with no wait bound.
     */
    public LockResult lock(String lockName, long ttl) {
        return this.tryLock(lockName, ttl, UNLIMITED_TIMEOUT);
    }

    /**
     * Release the lock: stop the keep-alive scheduler and revoke the lease.
     */
    public void unLock(String lockName, LockResult lockResult) {
        LOG.debug("Thread {} start to unlock {}",
                  Thread.currentThread().getName(), lockName);

        lockResult.getService().shutdown();

        if (lockResult.getLeaseId() != 0L) {
            this.revokeLease(lockResult.getLeaseId());
        }

        LOG.debug("Thread {} unlock {} successfully",
                  Thread.currentThread().getName(), lockName);
    }

    private void revokeLease(long leaseId) {
        try {
            this.leaseClient.revoke(leaseId).get();
        } catch (InterruptedException | ExecutionException e) {
            // Fix: "revoke release" typo in the log message
            LOG.warn(String.format("Thread %s failed to revoke lease %s",
                                   Thread.currentThread().getName(), leaseId), e);
        }
    }

    /**
     * Periodic task that refreshes an etcd lease once per invocation.
     */
    public static class KeepAliveTask implements Runnable {

        private final Lease leaseClient;
        private final long leaseId;

        KeepAliveTask(Lease leaseClient, long leaseId) {
            this.leaseClient = leaseClient;
            this.leaseId = leaseId;
        }

        @Override
        public void run() {
            // TODO: calculate the time interval between the calls
            this.leaseClient.keepAliveOnce(this.leaseId);
        }
    }
}
/**
 * Outcome of a distributed-lock attempt: whether acquisition succeeded,
 * the lease id backing the lock, and the scheduler/future that run the
 * lease keep-alive heartbeat. Plain mutable holder, not thread-safe.
 */
public class LockResult {

    // True once the lock has actually been acquired
    private boolean lockSuccess;
    // Lease id granted by the lock service (0 when no lease was created)
    private long leaseId;
    // Scheduler running the keep-alive heartbeat for this lock
    private ScheduledExecutorService service;
    // Handle to the periodic keep-alive task, if one was scheduled
    private ScheduledFuture future;

    /** Record whether the lock was acquired. */
    public void lockSuccess(boolean isLockSuccess) {
        this.lockSuccess = isLockSuccess;
    }

    /** @return true if the lock was acquired */
    public boolean lockSuccess() {
        return this.lockSuccess;
    }

    public long getLeaseId() {
        return this.leaseId;
    }

    public void setLeaseId(long leaseId) {
        this.leaseId = leaseId;
    }

    public ScheduledExecutorService getService() {
        return this.service;
    }

    public void setService(ScheduledExecutorService service) {
        this.service = service;
    }

    public ScheduledFuture getFuture() {
        return this.future;
    }

    public void setFuture(ScheduledFuture future) {
        this.future = future;
    }
}
package org.apache.hugegraph.meta.lock;

import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.hugegraph.HugeException;
import org.apache.hugegraph.pd.client.KvClient;
import org.apache.hugegraph.pd.common.PDException;
import org.apache.hugegraph.pd.grpc.kv.LockResponse;
import org.apache.hugegraph.pd.grpc.kv.WatchResponse;

/**
 * Distributed lock backed by HugeGraph-PD's KV lock API. While a lock is
 * held, a scheduled task periodically renews it via keepAlive().
 */
public class PdDistributedLock {

    private static final int POOL_SIZE = 8;

    private final KvClient<WatchResponse> client;
    // Shared scheduler for keep-alive heartbeats (daemon threads)
    private final ScheduledExecutorService service =
            new ScheduledThreadPoolExecutor(POOL_SIZE, r -> {
                Thread t = new Thread(r, "keepalive");
                t.setDaemon(true);
                return t;
            });

    public PdDistributedLock(KvClient<WatchResponse> client) {
        this.client = client;
    }

    /**
     * Try to acquire the lock on {@code key} with a ttl of {@code second}
     * seconds. On success the returned LockResult carries the owning client
     * id and a keep-alive future that renews the lock at 3/4 of its ttl.
     *
     * @throws HugeException wrapping the PDException on RPC failure
     */
    public LockResult lock(String key, long second) {
        long ttl = second * 1000L;
        try {
            LockResponse response = this.client.lock(key, ttl);
            LockResult result = new LockResult();
            if (response.getSucceed()) {
                result.setLeaseId(response.getClientId());
                result.lockSuccess(true);
                // Renew at 3/4 ttl so the lock never expires while held
                long period = ttl - ttl / 4;
                ScheduledFuture future = this.service.scheduleAtFixedRate(() -> {
                    // TODO: why synchronized?
                    synchronized (result) {
                        this.keepAlive(key);
                    }
                }, 10, period, TimeUnit.MILLISECONDS);
                result.setFuture(future);
            }
            return result;
        } catch (PDException e) {
            throw new HugeException("Failed to lock '%s' to pd", e, key);
        }
    }

    /**
     * Release the lock on {@code key} and cancel its keep-alive task.
     *
     * @throws HugeException if the unlock RPC fails or reports failure
     */
    public void unLock(String key, LockResult lockResult) {
        try {
            LockResponse response = this.client.unlock(key);
            if (!response.getSucceed()) {
                throw new HugeException("Failed to unlock '%s' to pd", key);
            }
            if (lockResult.getFuture() != null) {
                // TODO: why synchronized?
                synchronized (lockResult) {
                    lockResult.getFuture().cancel(true);
                }
            }
        } catch (PDException e) {
            throw new HugeException("Failed to unlock '%s' to pd", e, key);
        }
    }

    /**
     * Renew the lock on {@code key}; returns whether the renewal succeeded.
     */
    public boolean keepAlive(String key) {
        try {
            LockResponse alive = this.client.keepAlive(key);
            return alive.getSucceed();
        } catch (PDException e) {
            // Fix: preserve the PDException as the cause (was dropped)
            throw new HugeException("Failed to keepAlive '%s' to pd", e, key);
        }
    }
}
package org.apache.hugegraph.meta.managers;

import static org.apache.hugegraph.meta.MetaManager.LOCK_DEFAULT_LEASE;
import static org.apache.hugegraph.meta.MetaManager.LOCK_DEFAULT_TIMEOUT;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER;

import java.util.Map;
import java.util.function.Consumer;

import org.apache.commons.lang3.StringUtils;
import org.apache.hugegraph.auth.SchemaDefine;
import org.apache.hugegraph.meta.MetaDriver;
import org.apache.hugegraph.meta.lock.LockResult;
import org.apache.hugegraph.schema.SchemaElement;
import org.apache.hugegraph.util.JsonUtil;

/**
 * Common base for the per-domain meta managers: wraps a MetaDriver with
 * shared serialization, watch, and lock helpers scoped to one cluster.
 */
public class AbstractMetaManager {

    protected final MetaDriver metaDriver;
    protected final String cluster;

    public AbstractMetaManager(MetaDriver metaDriver, String cluster) {
        this.metaDriver = metaDriver;
        this.cluster = cluster;
    }

    /** Serialize an auth element to JSON via its map form. */
    protected static String serialize(SchemaDefine.AuthElement element) {
        return JsonUtil.toJson(element.asMap());
    }

    /** Serialize a schema element to JSON via its map form. */
    protected static String serialize(SchemaElement element) {
        return JsonUtil.toJson(element.asMap());
    }

    /** Parse a JSON config string into a map. */
    @SuppressWarnings("unchecked")
    protected static Map<String, Object> configMap(String config) {
        return JsonUtil.fromJson(config, Map.class);
    }

    /** Watch a single meta key. */
    protected <T> void listen(String key, Consumer<T> consumer) {
        this.metaDriver.listen(key, consumer);
    }

    /** Watch every meta key under a prefix. */
    protected <T> void listenPrefix(String prefix, Consumer<T> consumer) {
        this.metaDriver.listenPrefix(prefix, consumer);
    }

    /**
     * Read a raw meta value, normalizing a missing (null) value to "".
     */
    public String getRaw(String key) {
        String result = this.metaDriver.get(key);
        return result == null ? "" : result;
    }

    /**
     * Write a raw meta value; an empty/null value deletes the key instead.
     */
    public void putOrDeleteRaw(String key, String val) {
        if (StringUtils.isEmpty(val)) {
            this.metaDriver.delete(key);
        } else {
            this.metaDriver.put(key, val);
        }
    }

    /** Acquire the distributed lock on a key with the default lease/timeout. */
    public LockResult tryLock(String key) {
        return this.metaDriver.tryLock(key, LOCK_DEFAULT_LEASE,
                                       LOCK_DEFAULT_TIMEOUT);
    }

    /** Release the lock addressed by joining key parts with the meta delimiter. */
    public void unlock(LockResult lockResult, String... keys) {
        this.unlock(String.join(META_PATH_DELIMITER, keys), lockResult);
    }

    /** Release the lock on a fully-formed key. */
    public void unlock(String key, LockResult lockResult) {
        this.metaDriver.unlock(key, lockResult);
    }
}
+ */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ACCESS; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_AUTH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_AUTH_EVENT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_BELONG; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GROUP; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ROLE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_TARGET; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_USER; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.auth.HugeAccess; +import org.apache.hugegraph.auth.HugeBelong; +import org.apache.hugegraph.auth.HugeGroup; +import org.apache.hugegraph.auth.HugePermission; +import org.apache.hugegraph.auth.HugeRole; +import org.apache.hugegraph.auth.HugeTarget; +import org.apache.hugegraph.auth.HugeUser; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.MetaManager; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.JsonUtil; + +public class AuthMetaManager extends AbstractMetaManager { + + public AuthMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + + public void createUser(HugeUser user) throws IOException { + String result = this.metaDriver.get(userKey(user.name())); + E.checkArgument(StringUtils.isEmpty(result), + "The user 
name '%s' has existed", user.name()); + this.metaDriver.put(userKey(user.name()), serialize(user)); + } + + public HugeUser updateUser(HugeUser user) throws IOException { + String result = this.metaDriver.get(userKey(user.name())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The user name '%s' does not existed", user.name()); + + HugeUser ori = HugeUser.fromMap(JsonUtil.fromJson(result, Map.class)); + ori.update(new Date()); + ori.nickname(user.nickname()); + ori.password(user.password()); + ori.phone(user.phone()); + ori.email(user.email()); + ori.avatar(user.avatar()); + ori.description(user.description()); + this.metaDriver.put(userKey(user.name()), serialize(ori)); + return ori; + } + + public HugeUser deleteUser(Id id) throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(userKey(id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The user name '%s' does not existed", id.asString()); + this.metaDriver.delete(userKey(id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "USER", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeUser.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeUser findUser(String name) + throws IOException, ClassNotFoundException { + String result = this.metaDriver.get(userKey(name)); + if (StringUtils.isEmpty(result)) { + return null; + } + + return HugeUser.fromMap(JsonUtil.fromJson(result, Map.class)); + } + + public List listUsers(List ids) throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map userMap = + this.metaDriver.scanWithPrefix(userListKey()); + for (Id id : ids) { + if (userMap.containsKey(userKey(id.asString()))) { + String value = userMap.get(userKey(id.asString())); + Map map = JsonUtil.fromJson(value, Map.class); + HugeUser user = HugeUser.fromMap(map); + result.add(user); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List 
listUsersByGroup(String group, long limit) + throws IOException, ClassNotFoundException { + List result = new ArrayList<>(); + Map userMap = + this.metaDriver.scanWithPrefix(userListKey()); + for (Map.Entry item : userMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeUser user = HugeUser.fromMap(map); + result.add(user); + } + + List belongs = new ArrayList<>(); + Map belongMap = this.metaDriver.scanWithPrefix( + belongListKey("*")); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && belongs.size() >= limit) { + break; + } + String groupName = arrayFromBelong(item.getKey())[2]; + if (groupName.equals(group)) { + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + belongs.add(belong); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllUsers(long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map userMap = + this.metaDriver.scanWithPrefix(userListKey()); + for (Map.Entry item : userMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeUser user = HugeUser.fromMap(map); + result.add(user); + } + + return result; + } + + public Id createGroup(HugeGroup group) throws IOException { + String key = groupKey(group.name()); + String result = this.metaDriver.get(key); + E.checkArgument(StringUtils.isEmpty(result), + "The group name '%s' has existed", group.name()); + this.metaDriver.put(key, serialize(group)); + return group.id(); + } + + public HugeGroup updateGroup(HugeGroup group) throws IOException { + String key = groupKey(group.name()); + String result = this.metaDriver.get(key); + E.checkArgument(StringUtils.isNotEmpty(result), + "The group name '%s' is not existed", group.name()); + Map map = JsonUtil.fromJson(result, Map.class); + 
HugeGroup ori = HugeGroup.fromMap(map); + ori.update(new Date()); + ori.nickname(group.nickname()); + ori.description(group.description()); + this.metaDriver.put(key, serialize(ori)); + return ori; + } + + public HugeGroup deleteGroup(Id id) throws IOException, + ClassNotFoundException { + String name = id.asString(); + String key = groupKey(name); + String result = this.metaDriver.get(key); + E.checkArgument(StringUtils.isNotEmpty(result), + "The group name '%s' is not existed", name); + this.metaDriver.delete(key); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "GROUP", + name)); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeGroup.fromMap(map); + } + + public HugeGroup findGroup(String name) throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(groupKey(name)); + if (StringUtils.isEmpty(result)) { + return null; + } + + return HugeGroup.fromMap(JsonUtil.fromJson(result, Map.class)); + } + + public List listGroups(long limit) throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map groupMap = + this.metaDriver.scanWithPrefix(groupListKey()); + for (Map.Entry item : groupMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeGroup group = HugeGroup.fromMap(map); + result.add(group); + } + + return result; + } + + @SuppressWarnings("unchecked") + public Id createRole(String graphSpace, HugeRole role) + throws IOException { + Id roleId = IdGenerator.of(role.name()); + HugeRole existed = this.findRole(graphSpace, roleId); + // not support too many role to share same id + E.checkArgument(existed == null, "The role name '%s' has existed", + role.name()); + role.name(roleId.asString()); + + this.metaDriver.put(roleKey(graphSpace, role.name()), + serialize(role)); + return roleId; + } + + @SuppressWarnings("unchecked") + public HugeRole updateRole(String graphSpace, HugeRole role) + throws 
IOException { + String result = this.metaDriver.get(roleKey(graphSpace, role.name())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The role name '%s' is not existed", role.name()); + + // only description and update-time could be updated + Map map = JsonUtil.fromJson(result, Map.class); + HugeRole ori = HugeRole.fromMap(map); + ori.update(new Date()); + ori.nickname(role.nickname()); + ori.description(role.description()); + this.metaDriver.put(roleKey(graphSpace, ori.name()), + serialize(ori)); + return ori; + } + + @SuppressWarnings("unchecked") + public HugeRole deleteRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(roleKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The role name '%s' is not existed", id.asString()); + this.metaDriver.delete(roleKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "ROLE", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeRole.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeRole findRole(String graphSpace, Id id) { + String result = this.metaDriver.get(roleKey(graphSpace, + id.asString())); + if (StringUtils.isEmpty(result)) { + return null; + } + Map map = JsonUtil.fromJson(result, Map.class); + return HugeRole.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeRole getRole(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(roleKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The role name '%s' is not existed", id.asString()); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeRole.fromMap(map); + } + + @SuppressWarnings("unchecked") + public List listRoles(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map roleMap = + 
this.metaDriver.scanWithPrefix(roleListKey(graphSpace)); + for (Id id : ids) { + if (roleMap.containsKey(roleKey(graphSpace, id.asString()))) { + String roleString = roleMap.get(roleKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(roleString, + Map.class); + HugeRole role = HugeRole.fromMap(map); + result.add(role); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllRoles(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map roleMap = + this.metaDriver.scanWithPrefix(roleListKey(graphSpace)); + for (Map.Entry item : roleMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeRole role = HugeRole.fromMap(map); + result.add(role); + } + + return result; + } + + public Id createTarget(String graphSpace, HugeTarget target) + throws IOException { + String result = this.metaDriver.get(targetKey(graphSpace, + target.name())); + E.checkArgument(StringUtils.isEmpty(result), + "The target name '%s' has existed", target.name()); + this.metaDriver.put(targetKey(graphSpace, target.name()), + serialize(target)); + return target.id(); + } + + @SuppressWarnings("unchecked") + public HugeTarget updateTarget(String graphSpace, HugeTarget target) + throws IOException { + String result = this.metaDriver.get(targetKey(graphSpace, + target.name())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The target name '%s' is not existed", target.name()); + + // only resources and update-time could be updated + Map map = JsonUtil.fromJson(result, Map.class); + HugeTarget ori = HugeTarget.fromMap(map); + ori.update(new Date()); + ori.graph(target.graph()); + ori.description(target.description()); + ori.resources(target.resources()); + this.metaDriver.put(targetKey(graphSpace, target.name()), + serialize(ori)); + this.putAuthEvent(new MetaManager.AuthEvent("UPDATE", "TARGET", + 
ori.id().asString())); + return ori; + } + + @SuppressWarnings("unchecked") + public HugeTarget deleteTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(targetKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The target name '%s' is not existed", id.asString()); + this.metaDriver.delete(targetKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "TARGET", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeTarget.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeTarget findTarget(String graphSpace, Id id) { + String result = this.metaDriver.get(targetKey(graphSpace, + id.asString())); + if (StringUtils.isEmpty(result)) { + return null; + } + Map map = JsonUtil.fromJson(result, Map.class); + return HugeTarget.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeTarget getTarget(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(targetKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The target name '%s' is not existed", id.asString()); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeTarget.fromMap(map); + } + + @SuppressWarnings("unchecked") + public List listTargets(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map targetMap = + this.metaDriver.scanWithPrefix(targetListKey(graphSpace)); + for (Id id : ids) { + if (targetMap.containsKey(targetKey(graphSpace, id.asString()))) { + String targetString = targetMap.get(targetKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(targetString, + Map.class); + HugeTarget target = HugeTarget.fromMap(map); + result.add(target); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllTargets(String graphSpace, 
long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map targetMap = + this.metaDriver.scanWithPrefix(targetListKey(graphSpace)); + for (Map.Entry item : targetMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeTarget target = HugeTarget.fromMap(map); + result.add(target); + } + + return result; + } + + public Id createBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + String belongId = this.checkBelong(graphSpace, belong); + String result = this.metaDriver.get(belongKey(graphSpace, belongId)); + E.checkArgument(StringUtils.isEmpty(result), + "The belong name '%s' has existed", belongId); + this.metaDriver.put(belongKey(graphSpace, belongId), serialize(belong)); + this.putAuthEvent(new MetaManager.AuthEvent("CREATE", "BELONG", belongId)); + return IdGenerator.of(belongId); + } + + @SuppressWarnings("unchecked") + public HugeBelong updateBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + String belongId = this.checkBelong(graphSpace, belong); + String result = this.metaDriver.get(belongKey(graphSpace, belongId)); + E.checkArgument(StringUtils.isNotEmpty(result), + "The belong name '%s' is not existed", belongId); + + // only description and update-time could be updated + Map map = JsonUtil.fromJson(result, Map.class); + HugeBelong ori = HugeBelong.fromMap(map); + ori.update(new Date()); + ori.description(belong.description()); + this.metaDriver.put(belongKey(graphSpace, belongId), serialize(ori)); + return ori; + } + + public String checkBelong(String graphSpace, HugeBelong belong) + throws IOException, ClassNotFoundException { + String source = belong.source().asString(); + String target = belong.target().asString(); + String link = belong.link(); + HugeUser user = this.findUser(source); + HugeGroup group = this.findGroup(source); + 
E.checkArgument(user != null || group != null, + "The source name '%s' is not existed", + source); + HugeGroup groupTarget = this.findGroup(target); + HugeRole role = this.findRole(graphSpace, belong.target()); + E.checkArgument(role != null || groupTarget != null, + "The target name '%s' is not existed", + target); + + return belongId(source, target, link); + } + + @SuppressWarnings("unchecked") + public HugeBelong deleteBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(belongKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The belong name '%s' is not existed", id.asString()); + this.metaDriver.delete(belongKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "BELONG", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeBelong.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeBelong getBelong(String graphSpace, Id id) + throws IOException, + ClassNotFoundException { + String result = this.metaDriver.get(belongKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The belong name '%s' is not existed", id.asString()); + + Map map = JsonUtil.fromJson(result, Map.class); + return HugeBelong.fromMap(map); + } + + public boolean existBelong(String graphSpace, Id id) { + String result = this.metaDriver.get(belongKey(graphSpace, + id.asString())); + return StringUtils.isNotEmpty(result); + } + + @SuppressWarnings("unchecked") + public List listBelong(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map belongMap = + this.metaDriver.scanWithPrefix(belongListKey(graphSpace)); + for (Id id : ids) { + if (belongMap.containsKey(belongKey(graphSpace, id.asString()))) { + String belongString = belongMap.get(belongKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(belongString, + 
Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllBelong(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map belongMap = + this.metaDriver.scanWithPrefix(belongListKey(graphSpace)); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listBelongBySource(String graphSpace, Id source, + String link, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + + String sourceLink = (HugeBelong.ALL.equals(link)) ? source.asString() : + source.asString() + "->" + link; + + String key = belongListKeyBySource(graphSpace, sourceLink); + + Map belongMap = this.metaDriver.scanWithPrefix(key); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + + return result; + } + + public String[] arrayFromBelong(String belongKey) { + E.checkArgument(StringUtils.isNotEmpty(belongKey), + "The belong name '%s' is empty", belongKey); + E.checkArgument(belongKey.contains("->"), + "The belong name '%s' is invalid", belongKey); + String[] items = belongKey.split("->"); + E.checkArgument(items.length == 3, + "The belong name '%s' is invalid", belongKey); + return items; + } + + @SuppressWarnings("unchecked") + public List listBelongByTarget(String graphSpace, + Id role, String link, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map belongMap = this.metaDriver.scanWithPrefix( + 
belongListKey(graphSpace)); + for (Map.Entry item : belongMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + String[] array = arrayFromBelong(item.getKey()); + String linkName = array[1]; + String roleName = array[2]; + if ((linkName.equals(link) || "*".equals(link)) && + roleName.equals(role.asString())) { + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeBelong belong = HugeBelong.fromMap(map); + result.add(belong); + } + } + + return result; + } + + public Id createAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + String accessId = this.checkAccess(graphSpace, access); + String result = this.metaDriver.get(accessKey(graphSpace, accessId)); + E.checkArgument(StringUtils.isEmpty(result), + "The access name '%s' has existed", accessId); + this.metaDriver.put(accessKey(graphSpace, accessId), serialize(access)); + this.putAuthEvent(new MetaManager.AuthEvent("CREATE", "ACCESS", accessId)); + return IdGenerator.of(accessId); + } + + @SuppressWarnings("unchecked") + public HugeAccess updateAccess(String graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + String accessId = this.checkAccess(graphSpace, access); + String result = this.metaDriver.get(accessKey(graphSpace, accessId)); + E.checkArgument(StringUtils.isNotEmpty(result), + "The access name '%s' is not existed", accessId); + Map map = JsonUtil.fromJson(result, Map.class); + HugeAccess existed = HugeAccess.fromMap(map); + E.checkArgument(existed.permission().code() == + access.permission().code(), + "The access name '%s' has existed", accessId); + + // only description and update-time could be updated + Map oriMap = JsonUtil.fromJson(result, Map.class); + HugeAccess ori = HugeAccess.fromMap(oriMap); + ori.update(new Date()); + ori.description(access.description()); + this.metaDriver.put(accessKey(graphSpace, accessId), serialize(ori)); + return ori; + } + + public String checkAccess(String 
graphSpace, HugeAccess access) + throws IOException, ClassNotFoundException { + HugeRole role = this.getRole(graphSpace, access.source()); + E.checkArgument(role != null, + "The role name '%s' is not existed", + access.source().asString()); + + HugeTarget target = this.getTarget(graphSpace, access.target()); + E.checkArgument(target != null, + "The target name '%s' is not existed", + access.target().asString()); + + return accessId(role.name(), target.name(), access.permission()); + } + + @SuppressWarnings("unchecked") + public HugeAccess deleteAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + String result = this.metaDriver.get(accessKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The access name '%s' is not existed", id.asString()); + this.metaDriver.delete(accessKey(graphSpace, id.asString())); + this.putAuthEvent(new MetaManager.AuthEvent("DELETE", "ACCESS", id.asString())); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeAccess.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeAccess findAccess(String graphSpace, Id id) { + String result = this.metaDriver.get(accessKey(graphSpace, + id.asString())); + if (StringUtils.isEmpty(result)) { + return null; + } + Map map = JsonUtil.fromJson(result, Map.class); + return HugeAccess.fromMap(map); + } + + @SuppressWarnings("unchecked") + public HugeAccess getAccess(String graphSpace, Id id) + throws IOException, ClassNotFoundException { + String result = this.metaDriver.get(accessKey(graphSpace, + id.asString())); + E.checkArgument(StringUtils.isNotEmpty(result), + "The access name '%s' is not existed", id.asString()); + Map map = JsonUtil.fromJson(result, Map.class); + return HugeAccess.fromMap(map); + } + + @SuppressWarnings("unchecked") + public List listAccess(String graphSpace, List ids) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = + 
this.metaDriver.scanWithPrefix(accessListKey(graphSpace)); + for (Id id : ids) { + if (accessMap.containsKey(accessKey(graphSpace, id.asString()))) { + String accessString = accessMap.get(accessKey(graphSpace, + id.asString())); + Map map = JsonUtil.fromJson(accessString, + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAllAccess(String graphSpace, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = + this.metaDriver.scanWithPrefix(accessListKey(graphSpace)); + for (Map.Entry item : accessMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + + return result; + } + + @SuppressWarnings("unchecked") + public List listAccessByRole(String graphSpace, + Id role, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = this.metaDriver.scanWithPrefix( + accessListKeyByRole(graphSpace, role.asString())); + for (Map.Entry item : accessMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + + return result; + } + + public String targetFromAccess(String accessKey) { + E.checkArgument(StringUtils.isNotEmpty(accessKey), + "The access name '%s' is empty", accessKey); + E.checkArgument(accessKey.contains("->"), + "The access name '%s' is invalid", accessKey); + String[] items = accessKey.split("->"); + E.checkArgument(items.length == 3, + "The access name '%s' is invalid", accessKey); + return items[2]; + } + + public void clearGraphAuth(String graphSpace) { + E.checkArgument(StringUtils.isNotEmpty(graphSpace), + "The graphSpace is 
empty"); + String prefix = this.authPrefix(graphSpace); + this.metaDriver.deleteWithPrefix(prefix); + } + + @SuppressWarnings("unchecked") + public List listAccessByTarget(String graphSpace, + Id target, long limit) + throws IOException, + ClassNotFoundException { + List result = new ArrayList<>(); + Map accessMap = this.metaDriver.scanWithPrefix( + accessListKey(graphSpace)); + for (Map.Entry item : accessMap.entrySet()) { + if (limit >= 0 && result.size() >= limit) { + break; + } + String targetName = targetFromAccess(item.getKey()); + if (targetName.equals(target.asString())) { + Map map = JsonUtil.fromJson(item.getValue(), + Map.class); + HugeAccess access = HugeAccess.fromMap(map); + result.add(access); + } + } + + return result; + } + + public void listenAuthEvent(Consumer consumer) { + this.listen(this.authEventKey(), consumer); + } + + public void putAuthEvent(MetaManager.AuthEvent event) { + this.metaDriver.put(authEventKey(), JsonUtil.toJson(event.asMap())); + } + + public String belongId(String source, String target, String link) { + E.checkArgument(StringUtils.isNotEmpty(source) && + StringUtils.isNotEmpty(target), + "The source name '%s' or target name '%s' is empty", + source, target); + return String.join("->", source, link, target); + } + + public String accessId(String roleName, String targetName, + HugePermission permission) { + E.checkArgument(StringUtils.isNotEmpty(roleName) && + StringUtils.isNotEmpty(targetName), + "The role name '%s' or target name '%s' is empty", + roleName, targetName); + String code = String.valueOf(permission.code()); + return String.join("->", roleName, code, targetName); + } + + public String authEventKey() { + // HUGEGRAPH/{cluster}/AUTH_EVENT + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH_EVENT); + } + + private String userKey(String name) { + // HUGEGRAPH/{cluster}/AUTH/USER/{user} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + 
META_PATH_AUTH, + META_PATH_USER, + name); + } + + private String userListKey() { + // HUGEGRAPH/{cluster}/AUTH/USER + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH, + META_PATH_USER); + } + + private String authPrefix(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH); + } + + private String groupKey(String group) { + // HUGEGRAPH/{cluster}/AUTH/GROUP/{group} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH, + META_PATH_GROUP, + group); + } + + private String groupListKey() { + // HUGEGRAPH/{cluster}/AUTH/GROUP + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_AUTH, + META_PATH_GROUP); + } + + private String roleKey(String graphSpace, String role) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ROLE/{role} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ROLE, + role); + } + + private String roleListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ROLE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ROLE); + } + + private String targetKey(String graphSpace, String target) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/TARGET/{target} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_TARGET, + target); + } + + private String targetListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/TARGET + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + 
graphSpace, + META_PATH_AUTH, + META_PATH_TARGET); + } + + private String belongKey(String graphSpace, String belong) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/BELONG/{belong} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_BELONG, + belong); + } + + private String belongListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/BELONG + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_BELONG); + } + + private String belongListKeyBySource(String graphSpace, String source) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/BELONG/{userName} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_BELONG, + source + "->"); + } + + private String accessKey(String graphSpace, String access) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ACCESS/{role->op->target} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ACCESS, + access); + } + + private String accessListKey(String graphSpace) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ACCESS + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ACCESS); + } + + private String accessListKeyByRole(String graphSpace, String roleName) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/AUTH/ACCESS/{roleName} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_AUTH, + META_PATH_ACCESS, + roleName + "->"); + } +} diff --git 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.meta.managers;

import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GREMLIN_YAML;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_REST_PROPERTIES;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_SERVICE;

import java.util.Map;
import java.util.function.Consumer;

import org.apache.commons.lang3.StringUtils;
import org.apache.hugegraph.meta.MetaDriver;
import org.apache.hugegraph.util.JsonUtil;

/**
 * Stores and watches per-service REST properties and the gremlin-server
 * YAML config in the meta store, keyed under the owning graph space.
 */
public class ConfigMetaManager extends AbstractMetaManager {

    public ConfigMetaManager(MetaDriver metaDriver, String cluster) {
        super(metaDriver, cluster);
    }

    /** Reads the REST properties of a service; null when none are stored. */
    @SuppressWarnings("unchecked")
    public Map<String, Object> restProperties(String graphSpace,
                                              String serviceId) {
        String result = this.metaDriver.get(restPropertiesKey(graphSpace,
                                                              serviceId));
        if (StringUtils.isEmpty(result)) {
            return null;
        }
        return JsonUtil.fromJson(result, Map.class);
    }

    /**
     * Merges {@code properties} into the stored REST properties (new keys
     * override existing ones) and returns the merged map.
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> restProperties(String graphSpace,
                                              String serviceId,
                                              Map<String, Object> properties) {
        Map<String, Object> map;
        String result = this.metaDriver.get(restPropertiesKey(graphSpace,
                                                              serviceId));
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
            for (Map.Entry<String, Object> item : properties.entrySet()) {
                map.put(item.getKey(), item.getValue());
            }
        } else {
            map = properties;
        }
        this.metaDriver.put(restPropertiesKey(graphSpace, serviceId),
                            JsonUtil.toJson(map));
        return map;
    }

    /**
     * Removes one key from the stored REST properties and writes the rest
     * back; returns the remaining map, or null when nothing was stored.
     */
    @SuppressWarnings("unchecked")
    public Map<String, Object> deleteRestProperties(String graphSpace,
                                                    String serviceId,
                                                    String key) {
        Map<String, Object> map = null;
        String result = this.metaDriver.get(restPropertiesKey(graphSpace,
                                                              serviceId));
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
            map.remove(key);
            this.metaDriver.put(restPropertiesKey(graphSpace, serviceId),
                                JsonUtil.toJson(map));
        }
        return map;
    }

    /** Deletes all REST properties of a service; returns what was stored. */
    @SuppressWarnings("unchecked")
    public Map<String, Object> clearRestProperties(String graphSpace,
                                                   String serviceId) {
        Map<String, Object> map = null;
        String key = restPropertiesKey(graphSpace, serviceId);
        String result = this.metaDriver.get(key);
        if (StringUtils.isNotEmpty(result)) {
            map = JsonUtil.fromJson(result, Map.class);
            this.metaDriver.delete(key);
        }
        return map;
    }

    /** Reads the stored gremlin-server YAML of a service. */
    public String gremlinYaml(String graphSpace, String serviceId) {
        return this.metaDriver.get(gremlinYamlKey(graphSpace, serviceId));
    }

    /** Stores the gremlin-server YAML of a service and echoes it back. */
    public String gremlinYaml(String graphSpace, String serviceId,
                              String yaml) {
        this.metaDriver.put(gremlinYamlKey(graphSpace, serviceId), yaml);
        return yaml;
    }

    /** Subscribes to changes of a service's REST properties. */
    public <T> void listenRestPropertiesUpdate(String graphSpace,
                                               String serviceId,
                                               Consumer<T> consumer) {
        this.listen(this.restPropertiesKey(graphSpace, serviceId), consumer);
    }

    /** Subscribes to changes of a service's gremlin-server YAML. */
    public <T> void listenGremlinYamlUpdate(String graphSpace,
                                            String serviceId,
                                            Consumer<T> consumer) {
        this.listen(this.gremlinYamlKey(graphSpace, serviceId), consumer);
    }

    // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE/{serviceId}/REST_PROPERTIES
    private String restPropertiesKey(String graphSpace, String serviceId) {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_GRAPHSPACE,
                           graphSpace,
                           META_PATH_SERVICE,
                           serviceId,
                           META_PATH_REST_PROPERTIES);
    }

    // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE/{serviceId}/GREMLIN_YAML
    private String gremlinYamlKey(String graphSpace, String serviceId) {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_GRAPHSPACE,
                           graphSpace,
                           META_PATH_SERVICE,
                           serviceId,
                           META_PATH_GREMLIN_YAML);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.apache.hugegraph.meta.managers;

import static org.apache.hugegraph.meta.MetaManager.META_PATH_ADD;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_CLEAR;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_EDGE_LABEL;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_EVENT;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPH_CONF;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_JOIN;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_REMOVE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_SCHEMA;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_UPDATE;
import static org.apache.hugegraph.meta.MetaManager.META_PATH_VERTEX_LABEL;

import java.util.Map;
import java.util.function.Consumer;

import org.apache.hugegraph.meta.MetaDriver;
import org.apache.hugegraph.type.define.CollectionType;
import org.apache.hugegraph.util.JsonUtil;
import org.apache.hugegraph.util.collection.CollectionFactory;
import org.apache.logging.log4j.util.Strings;

/**
 * Stores per-graph configuration in the meta store and publishes/subscribes
 * graph lifecycle events (add/update/remove/clear and cache-clear signals).
 */
public class GraphMetaManager extends AbstractMetaManager {

    public GraphMetaManager(MetaDriver metaDriver, String cluster) {
        super(metaDriver, cluster);
    }

    /** Event payload: "{graphSpace}{META_PATH_JOIN}{graph}". */
    private static String graphName(String graphSpace, String name) {
        return String.join(META_PATH_JOIN, graphSpace, name);
    }

    /**
     * Loads all graph configs of a graph space, keyed by "{space}-{name}".
     * NOTE(review): this key uses a literal "-" while graphName() uses
     * META_PATH_JOIN — confirm both are meant to differ.
     */
    public Map<String, Map<String, Object>> graphConfigs(String graphSpace) {
        Map<String, Map<String, Object>> configs =
                CollectionFactory.newMap(CollectionType.EC);
        Map<String, String> keyValues = this.metaDriver.scanWithPrefix(
                this.graphConfPrefix(graphSpace));
        for (Map.Entry<String, String> entry : keyValues.entrySet()) {
            String key = entry.getKey();
            String[] parts = key.split(META_PATH_DELIMITER);
            // The graph name is the last path segment of the meta key
            String name = parts[parts.length - 1];
            String graphName = String.join("-", graphSpace, name);
            configs.put(graphName, configMap(entry.getValue()));
        }
        return configs;
    }

    /** Deletes the stored config of one graph. */
    public void removeGraphConfig(String graphSpace, String graph) {
        this.metaDriver.delete(this.graphConfKey(graphSpace, graph));
    }

    /** Notifies other nodes that a graph was added. */
    public void notifyGraphAdd(String graphSpace, String graph) {
        this.metaDriver.put(this.graphAddKey(),
                            graphName(graphSpace, graph));
    }

    /** Notifies other nodes that a graph was removed. */
    public void notifyGraphRemove(String graphSpace, String graph) {
        this.metaDriver.put(this.graphRemoveKey(),
                            graphName(graphSpace, graph));
    }

    /** Notifies other nodes that a graph's config was updated. */
    public void notifyGraphUpdate(String graphSpace, String graph) {
        this.metaDriver.put(this.graphUpdateKey(),
                            graphName(graphSpace, graph));
    }

    /** Notifies other nodes that a graph's data was cleared. */
    public void notifyGraphClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Asks other nodes to drop their schema cache for the graph. */
    public void notifySchemaCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.schemaCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Asks other nodes to drop their graph (data) cache for the graph. */
    public void notifyGraphCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Asks other nodes to drop the vertex-label cache for the graph. */
    public void notifyGraphVertexCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphVertexCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Asks other nodes to drop the edge-label cache for the graph. */
    public void notifyGraphEdgeCacheClear(String graphSpace, String graph) {
        this.metaDriver.put(this.graphEdgeCacheClearKey(),
                            graphName(graphSpace, graph));
    }

    /** Reads the stored config of one graph as a map. */
    public Map<String, Object> getGraphConfig(String graphSpace,
                                              String graph) {
        return configMap(this.metaDriver.get(this.graphConfKey(graphSpace,
                                                               graph)));
    }

    /** Stores the config of a new graph. */
    public void addGraphConfig(String graphSpace, String graph,
                               Map<String, Object> configs) {
        this.metaDriver.put(this.graphConfKey(graphSpace, graph),
                            JsonUtil.toJson(configs));
    }

    /** Overwrites the stored config of an existing graph. */
    public void updateGraphConfig(String graphSpace, String graph,
                                  Map<String, Object> configs) {
        this.metaDriver.put(this.graphConfKey(graphSpace, graph),
                            JsonUtil.toJson(configs));
    }

    public <T> void listenGraphAdd(Consumer<T> consumer) {
        this.listen(this.graphAddKey(), consumer);
    }

    public <T> void listenGraphUpdate(Consumer<T> consumer) {
        this.listen(this.graphUpdateKey(), consumer);
    }

    public <T> void listenGraphRemove(Consumer<T> consumer) {
        this.listen(this.graphRemoveKey(), consumer);
    }

    public <T> void listenGraphClear(Consumer<T> consumer) {
        this.listen(this.graphClearKey(), consumer);
    }

    public <T> void listenSchemaCacheClear(Consumer<T> consumer) {
        this.listen(this.schemaCacheClearKey(), consumer);
    }

    public <T> void listenGraphCacheClear(Consumer<T> consumer) {
        this.listen(this.graphCacheClearKey(), consumer);
    }

    public <T> void listenGraphVertexCacheClear(Consumer<T> consumer) {
        this.listen(this.graphVertexCacheClearKey(), consumer);
    }

    public <T> void listenGraphEdgeCacheClear(Consumer<T> consumer) {
        this.listen(this.graphEdgeCacheClearKey(), consumer);
    }

    /** Prefix covering every graph-conf key of the graph space. */
    private String graphConfPrefix(String graphSpace) {
        return this.graphConfKey(graphSpace, Strings.EMPTY);
    }

    // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH_CONF/{graph}
    private String graphConfKey(String graphSpace, String graph) {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_GRAPHSPACE,
                           graphSpace,
                           META_PATH_GRAPH_CONF,
                           graph);
    }

    // HUGEGRAPH/{cluster}/EVENT/GRAPH/ADD
    private String graphAddKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_ADD);
    }

    // HUGEGRAPH/{cluster}/EVENT/GRAPH/REMOVE
    private String graphRemoveKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_REMOVE);
    }

    // HUGEGRAPH/{cluster}/EVENT/GRAPH/UPDATE
    private String graphUpdateKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_UPDATE);
    }

    // HUGEGRAPH/{cluster}/EVENT/GRAPH/CLEAR
    private String graphClearKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_CLEAR);
    }

    // HUGEGRAPH/{cluster}/EVENT/GRAPH/SCHEMA/CLEAR
    private String schemaCacheClearKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_SCHEMA,
                           META_PATH_CLEAR);
    }

    // HUGEGRAPH/{cluster}/EVENT/GRAPH/GRAPH/CLEAR
    // NOTE(review): META_PATH_GRAPH appears twice; looks intentional but verify
    private String graphCacheClearKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_GRAPH,
                           META_PATH_CLEAR);
    }

    /** Key watched (e.g. by PD) for vertex-label cache-clear events. */
    // HUGEGRAPH/{cluster}/EVENT/GRAPH/GRAPH/{META_PATH_VERTEX_LABEL}/CLEAR
    private String graphVertexCacheClearKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_GRAPH,
                           META_PATH_VERTEX_LABEL,
                           META_PATH_CLEAR);
    }

    /** Key watched (e.g. by PD) for edge-label cache-clear events. */
    // HUGEGRAPH/{cluster}/EVENT/GRAPH/GRAPH/{META_PATH_EDGE_LABEL}/CLEAR
    private String graphEdgeCacheClearKey() {
        return String.join(META_PATH_DELIMITER,
                           META_PATH_HUGEGRAPH,
                           this.cluster,
                           META_PATH_EVENT,
                           META_PATH_GRAPH,
                           META_PATH_GRAPH,
                           META_PATH_EDGE_LABEL,
                           META_PATH_CLEAR);
    }
}
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/KafkaMetaManager.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_KAFKA; + +import java.util.function.Consumer; + +import org.apache.hugegraph.meta.MetaDriver; + +public class KafkaMetaManager extends AbstractMetaManager { + + public KafkaMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public void listenKafkaConfig(Consumer consumer) { + String prefix = this.kafkaPrefixKey(); + this.listenPrefix(prefix, consumer); + } + + private String kafkaPrefixKey() { + // HUGEGRAPH/{cluster}/KAFKA + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_KAFKA); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/LockMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/LockMetaManager.java new file 
mode 100644 index 0000000000..7910887266 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/LockMetaManager.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import org.apache.hugegraph.meta.MetaDriver; + +public class LockMetaManager extends AbstractMetaManager { + + public LockMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaMetaManager.java new file mode 100644 index 0000000000..57e2839837 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaMetaManager.java @@ -0,0 +1,517 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_EDGE_LABEL; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ID; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_INDEX_LABEL; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_NAME; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_PROPERTY_KEY; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SCHEMA; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_VERTEX_LABEL; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.PdMetaDriver; +import org.apache.hugegraph.schema.EdgeLabel; +import org.apache.hugegraph.schema.IndexLabel; +import org.apache.hugegraph.schema.PropertyKey; +import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.util.JsonUtil; + +public class SchemaMetaManager extends AbstractMetaManager { + private 
final HugeGraph graph; + + public SchemaMetaManager(MetaDriver metaDriver, String cluster, HugeGraph graph) { + super(metaDriver, cluster); + this.graph = graph; + } + + public static void main(String[] args) { + MetaDriver metaDriver = new PdMetaDriver("127.0.0.1:8686"); + SchemaMetaManager schemaMetaManager = new SchemaMetaManager(metaDriver, "hg", null); + PropertyKey propertyKey = new PropertyKey(null, IdGenerator.of(5), "test"); + propertyKey.userdata("key1", "value1"); + propertyKey.userdata("key2", 23); + schemaMetaManager.addPropertyKey("DEFAULT1", "hugegraph", propertyKey); + +// PropertyKey propertyKey1 = schemaMetaManager.getPropertyKey("DEFAULT1", "hugegraph", +// IdGenerator.of(1)); + schemaMetaManager.removePropertyKey("DEFAULT", "hugegraph", IdGenerator.of(1)); + +// propertyKey1 = schemaMetaManager.getPropertyKey("DEFAULT1", "hugegraph", "test"); +// System.out.println(propertyKey1 ); +// +// propertyKey1 = schemaMetaManager.getPropertyKey("DEFAULT1", "hugegraph", "5"); +// System.out.println(propertyKey1 ); + } + + public void addPropertyKey(String graphSpace, String graph, + PropertyKey propertyKey) { + String content = serialize(propertyKey); + this.metaDriver.put(propertyKeyIdKey(graphSpace, graph, + propertyKey.id()), content); + this.metaDriver.put(propertyKeyNameKey(graphSpace, graph, + propertyKey.name()), content); + } + + public void updatePropertyKey(String graphSpace, String graph, + PropertyKey pkey) { + this.addPropertyKey(graphSpace, graph, pkey); + } + + @SuppressWarnings("unchecked") + public PropertyKey getPropertyKey(String graphSpace, String graph, + Id propertyKey) { + String content = this.metaDriver.get(propertyKeyIdKey(graphSpace, graph, + propertyKey)); + if (content == null || content.length() == 0) { + return null; + } else { + return PropertyKey.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public PropertyKey getPropertyKey(String graphSpace, String graph, + 
String propertyKey) { + String content = this.metaDriver.get(propertyKeyNameKey(graphSpace, + graph, + propertyKey)); + if (content == null || content.length() == 0) { + return null; + } else { + return PropertyKey.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getPropertyKeys(String graphSpace, String graph) { + Map propertyKeysKvs = this.metaDriver.scanWithPrefix( + propertyKeyPrefix(graphSpace, graph)); + List propertyKeys = + new ArrayList<>(propertyKeysKvs.size()); + for (String value : propertyKeysKvs.values()) { + propertyKeys.add(PropertyKey.fromMap(JsonUtil.fromJson(value, Map.class), this.graph)); + } + return propertyKeys; + } + + public Id removePropertyKey(String graphSpace, String graph, + Id propertyKey) { + PropertyKey p = this.getPropertyKey(graphSpace, graph, propertyKey); + this.metaDriver.delete(propertyKeyNameKey(graphSpace, graph, + p.name())); + this.metaDriver.delete(propertyKeyIdKey(graphSpace, graph, + propertyKey)); + return IdGenerator.ZERO; + } + + public void addVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + String content = serialize(vertexLabel); + this.metaDriver.put(vertexLabelIdKey(graphSpace, graph, + vertexLabel.id()), content); + this.metaDriver.put(vertexLabelNameKey(graphSpace, graph, + vertexLabel.name()), content); + } + + public void updateVertexLabel(String graphSpace, String graph, + VertexLabel vertexLabel) { + this.addVertexLabel(graphSpace, graph, vertexLabel); + } + + @SuppressWarnings("unchecked") + public VertexLabel getVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + String content = this.metaDriver.get(vertexLabelIdKey(graphSpace, graph, + vertexLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return VertexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public VertexLabel getVertexLabel(String 
graphSpace, String graph, + String vertexLabel) { + String content = this.metaDriver.get(vertexLabelNameKey(graphSpace, + graph, + vertexLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return VertexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getVertexLabels(String graphSpace, String graph) { + Map vertexLabelKvs = this.metaDriver.scanWithPrefix( + vertexLabelPrefix(graphSpace, graph)); + List vertexLabels = + new ArrayList<>(vertexLabelKvs.size()); + for (String value : vertexLabelKvs.values()) { + vertexLabels.add(VertexLabel.fromMap( + JsonUtil.fromJson(value, Map.class), this.graph)); + } + return vertexLabels; + } + + public Id removeVertexLabel(String graphSpace, String graph, + Id vertexLabel) { + VertexLabel v = this.getVertexLabel(graphSpace, graph, + vertexLabel); + this.metaDriver.delete(vertexLabelNameKey(graphSpace, graph, + v.name())); + this.metaDriver.delete(vertexLabelIdKey(graphSpace, graph, + vertexLabel)); + return IdGenerator.ZERO; + } + + public void addEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + String content = serialize(edgeLabel); + this.metaDriver.put(edgeLabelIdKey(graphSpace, graph, + edgeLabel.id()), content); + this.metaDriver.put(edgeLabelNameKey(graphSpace, graph, + edgeLabel.name()), content); + } + + public void updateEdgeLabel(String graphSpace, String graph, + EdgeLabel edgeLabel) { + this.addEdgeLabel(graphSpace, graph, edgeLabel); + } + + @SuppressWarnings("unchecked") + public EdgeLabel getEdgeLabel(String graphSpace, String graph, + Id edgeLabel) { + String content = this.metaDriver.get(edgeLabelIdKey(graphSpace, graph, + edgeLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return EdgeLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public EdgeLabel getEdgeLabel(String graphSpace, 
String graph, + String edgeLabel) { + String content = this.metaDriver.get(edgeLabelNameKey(graphSpace, + graph, + edgeLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return EdgeLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getEdgeLabels(String graphSpace, String graph) { + Map edgeLabelKvs = this.metaDriver.scanWithPrefix( + edgeLabelPrefix(graphSpace, graph)); + List edgeLabels = + new ArrayList<>(edgeLabelKvs.size()); + for (String value : edgeLabelKvs.values()) { + edgeLabels.add(EdgeLabel.fromMap( + JsonUtil.fromJson(value, Map.class), this.graph)); + } + return edgeLabels; + } + + public Id removeEdgeLabel(String graphSpace, String graph, + Id edgeLabel) { + EdgeLabel e = this.getEdgeLabel(graphSpace, graph, + edgeLabel); + this.metaDriver.delete(edgeLabelNameKey(graphSpace, graph, + e.name())); + this.metaDriver.delete(edgeLabelIdKey(graphSpace, graph, + edgeLabel)); + return IdGenerator.ZERO; + } + + public void addIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + String content = serialize(indexLabel); + this.metaDriver.put(indexLabelIdKey(graphSpace, graph, + indexLabel.id()), content); + this.metaDriver.put(indexLabelNameKey(graphSpace, graph, + indexLabel.name()), content); + } + + public void updateIndexLabel(String graphSpace, String graph, + IndexLabel indexLabel) { + this.addIndexLabel(graphSpace, graph, indexLabel); + } + + @SuppressWarnings("unchecked") + public IndexLabel getIndexLabel(String graphSpace, String graph, + Id indexLabel) { + String content = this.metaDriver.get(indexLabelIdKey(graphSpace, graph, + indexLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return IndexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public IndexLabel getIndexLabel(String graphSpace, String graph, + String edgeLabel) { + 
String content = this.metaDriver.get(indexLabelNameKey(graphSpace, + graph, + edgeLabel)); + if (content == null || content.length() == 0) { + return null; + } else { + return IndexLabel.fromMap(JsonUtil.fromJson(content, Map.class), this.graph); + } + } + + @SuppressWarnings("unchecked") + public List getIndexLabels(String graphSpace, String graph) { + Map indexLabelKvs = this.metaDriver.scanWithPrefix( + indexLabelPrefix(graphSpace, graph)); + List indexLabels = + new ArrayList<>(indexLabelKvs.size()); + for (String value : indexLabelKvs.values()) { + indexLabels.add(IndexLabel.fromMap( + JsonUtil.fromJson(value, Map.class), this.graph)); + } + return indexLabels; + } + + public Id removeIndexLabel(String graphSpace, String graph, Id indexLabel) { + IndexLabel i = this.getIndexLabel(graphSpace, graph, + indexLabel); + this.metaDriver.delete(indexLabelNameKey(graphSpace, graph, + i.name())); + this.metaDriver.delete(indexLabelIdKey(graphSpace, graph, + indexLabel)); + return IdGenerator.ZERO; + } + + private String propertyKeyPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTY_KEY/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_PROPERTY_KEY, + META_PATH_NAME); + } + + private String propertyKeyIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTY_KEY/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_PROPERTY_KEY, + META_PATH_ID, + id.asString()); + } + + private String propertyKeyNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTY_KEY/NAME/{name} + return String.join(META_PATH_DELIMITER, + 
META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_PROPERTY_KEY, + META_PATH_NAME, + name); + } + + private String vertexLabelPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/VERTEX_LABEL/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_VERTEX_LABEL, + META_PATH_NAME); + } + + private String vertexLabelIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/VERTEX_LABEL/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_VERTEX_LABEL, + META_PATH_ID, + id.asString()); + } + + private String vertexLabelNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/VERTEX_LABEL/NAME/{name} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_VERTEX_LABEL, + META_PATH_NAME, + name); + } + + private String edgeLabelPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTYKEY/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_EDGE_LABEL, + META_PATH_NAME); + } + + private String edgeLabelIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/PROPERTYKEY/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + 
META_PATH_EDGE_LABEL, + META_PATH_ID, + id.asString()); + } + + private String edgeLabelNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/EDGE_LABEL/NAME/{name} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_EDGE_LABEL, + META_PATH_NAME, + name); + } + + private String indexLabelPrefix(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/INDEX_LABEL/NAME + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_INDEX_LABEL, + META_PATH_NAME); + } + + private String indexLabelIdKey(String graphSpace, String graph, Id id) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/INDEX_LABEL/ID/{id} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_INDEX_LABEL, + META_PATH_ID, + id.asString()); + } + + private String indexLabelNameKey(String graphSpace, String graph, + String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph + // }/SCHEMA/INDEX_LABEL/NAME/{name} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA, + META_PATH_INDEX_LABEL, + META_PATH_NAME, + name); + } + + private String graphNameKey(String graphSpace, String graph) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/GRAPH/{graph}/SCHEMA + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graph, + META_PATH_SCHEMA); + } + + public void clearAllSchema(String graphSpace, String graph) { + this.metaDriver.deleteWithPrefix(graphNameKey(graphSpace, 
graph)); + } + +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java new file mode 100644 index 0000000000..bc2f1448bc --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SchemaTemplateMetaManager.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SCHEMA_TEMPLATE; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.space.SchemaTemplate; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.logging.log4j.util.Strings; + +public class SchemaTemplateMetaManager extends AbstractMetaManager { + + public SchemaTemplateMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public Set schemaTemplates(String graphSpace) { + Set result = new HashSet<>(); + Map keyValues = this.metaDriver.scanWithPrefix( + this.schemaTemplatePrefix(graphSpace)); + for (String key : keyValues.keySet()) { + String[] parts = key.split(META_PATH_DELIMITER); + result.add(parts[parts.length - 1]); + } + return result; + } + + @SuppressWarnings("unchecked") + public SchemaTemplate schemaTemplate(String graphSpace, + String schemaTemplate) { + String s = this.metaDriver.get(this.schemaTemplateKey(graphSpace, + schemaTemplate)); + if (StringUtils.isEmpty(s)) { + return null; + } + return SchemaTemplate.fromMap(JsonUtil.fromJson(s, Map.class)); + } + + public void addSchemaTemplate(String graphSpace, SchemaTemplate template) { + + String key = this.schemaTemplateKey(graphSpace, template.name()); + + String data = this.metaDriver.get(key); + if (StringUtils.isNotEmpty(data)) { + throw new HugeException("Cannot create schema template " + + "since it has been created"); + } + + this.metaDriver.put(this.schemaTemplateKey(graphSpace, template.name()), + 
JsonUtil.toJson(template.asMap())); + } + + public void updateSchemaTemplate(String graphSpace, + SchemaTemplate template) { + this.metaDriver.put(this.schemaTemplateKey(graphSpace, template.name()), + JsonUtil.toJson(template.asMap())); + } + + public void removeSchemaTemplate(String graphSpace, String name) { + this.metaDriver.delete(this.schemaTemplateKey(graphSpace, name)); + } + + public void clearSchemaTemplate(String graphSpace) { + String prefix = this.schemaTemplatePrefix(graphSpace); + this.metaDriver.deleteWithPrefix(prefix); + } + + private String schemaTemplatePrefix(String graphSpace) { + return this.schemaTemplateKey(graphSpace, Strings.EMPTY); + } + + private String schemaTemplateKey(String graphSpace, String schemaTemplate) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SCHEMA_TEMPLATE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_SCHEMA_TEMPLATE, + schemaTemplate); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java new file mode 100644 index 0000000000..3c03be3433 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/ServiceMetaManager.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ADD; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_EVENT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_JOIN; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_REMOVE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SERVICE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_SERVICE_CONF; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_UPDATE; + +import java.util.HashMap; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.space.Service; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.logging.log4j.util.Strings; + +public class ServiceMetaManager extends AbstractMetaManager { + + public ServiceMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + private static String serviceName(String graphSpace, String name) { + return String.join(META_PATH_JOIN, graphSpace, name); + } + + public Map serviceConfigs(String graphSpace) { + Map serviceMap = new HashMap<>(); + Map keyValues = this.metaDriver.scanWithPrefix( + this.serviceConfPrefix(graphSpace)); + 
for (Map.Entry entry : keyValues.entrySet()) { + String key = entry.getKey(); + String[] parts = key.split(META_PATH_DELIMITER); + serviceMap.put(parts[parts.length - 1], + JsonUtil.fromJson(entry.getValue(), Service.class)); + } + return serviceMap; + } + + public String getServiceRawConfig(String graphSpace, String service) { + return this.metaDriver.get(this.serviceConfKey(graphSpace, service)); + } + + public Service getServiceConfig(String graphSpace, String service) { + String s = this.getServiceRawConfig(graphSpace, service); + return this.parseServiceRawConfig(s); + } + + public Service parseServiceRawConfig(String serviceRawConf) { + return JsonUtil.fromJson(serviceRawConf, Service.class); + } + + public void notifyServiceAdd(String graphSpace, String name) { + this.metaDriver.put(this.serviceAddKey(), + serviceName(graphSpace, name)); + } + + public void notifyServiceRemove(String graphSpace, String name) { + this.metaDriver.put(this.serviceRemoveKey(), + serviceName(graphSpace, name)); + } + + public void notifyServiceUpdate(String graphSpace, String name) { + this.metaDriver.put(this.serviceUpdateKey(), + serviceName(graphSpace, name)); + } + + public Service service(String graphSpace, String name) { + String service = this.metaDriver.get(this.serviceConfKey(graphSpace, + name)); + if (StringUtils.isEmpty(service)) { + return null; + } + return JsonUtil.fromJson(service, Service.class); + } + + public void addServiceConfig(String graphSpace, Service service) { + this.metaDriver.put(this.serviceConfKey(graphSpace, service.name()), + JsonUtil.toJson(service)); + } + + public void removeServiceConfig(String graphSpace, String service) { + this.metaDriver.delete(this.serviceConfKey(graphSpace, service)); + } + + public void updateServiceConfig(String graphSpace, Service service) { + this.addServiceConfig(graphSpace, service); + } + + public void listenServiceAdd(Consumer consumer) { + this.listen(this.serviceAddKey(), consumer); + } + + public void 
listenServiceRemove(Consumer consumer) { + this.listen(this.serviceRemoveKey(), consumer); + } + + public void listenServiceUpdate(Consumer consumer) { + this.listen(this.serviceUpdateKey(), consumer); + } + + private String serviceConfPrefix(String graphSpace) { + return this.serviceConfKey(graphSpace, Strings.EMPTY); + } + + private String serviceAddKey() { + // HUGEGRAPH/{cluster}/EVENT/SERVICE/ADD + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_SERVICE, + META_PATH_ADD); + } + + private String serviceRemoveKey() { + // HUGEGRAPH/{cluster}/EVENT/SERVICE/REMOVE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_SERVICE, + META_PATH_REMOVE); + } + + private String serviceUpdateKey() { + // HUGEGRAPH/{cluster}/EVENT/SERVICE/UPDATE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_SERVICE, + META_PATH_UPDATE); + } + + private String serviceConfKey(String graphSpace, String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphspace}/SERVICE_CONF + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + META_PATH_SERVICE_CONF, + name); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java new file mode 100644 index 0000000000..d9e1688247 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/SpaceMetaManager.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_ADD; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_CONF; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_EVENT; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE_LIST; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_REMOVE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_UPDATE; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; + +import org.apache.commons.lang3.StringUtils; +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.space.GraphSpace; +import org.apache.hugegraph.type.define.CollectionType; +import org.apache.hugegraph.util.JsonUtil; +import org.apache.hugegraph.util.collection.CollectionFactory; + +public class SpaceMetaManager extends AbstractMetaManager { + + public SpaceMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public List listGraphSpace() { + List result = new ArrayList<>(); + Map graphSpaceMap = 
this.metaDriver.scanWithPrefix( + graphSpaceListKey()); + for (Map.Entry item : graphSpaceMap.entrySet()) { + result.add(item.getValue()); + } + + return result; + } + + public Map graphSpaceConfigs() { + Map keyValues = this.metaDriver.scanWithPrefix( + this.graphSpaceConfPrefix()); + Map configs = + CollectionFactory.newMap(CollectionType.EC); + for (Map.Entry entry : keyValues.entrySet()) { + String key = entry.getKey(); + String[] parts = key.split(META_PATH_DELIMITER); + configs.put(parts[parts.length - 1], + JsonUtil.fromJson(entry.getValue(), GraphSpace.class)); + } + return configs; + } + + public GraphSpace graphSpace(String name) { + String space = this.metaDriver.get(this.graphSpaceConfKey(name)); + if (StringUtils.isEmpty(space)) { + return null; + } + return JsonUtil.fromJson(space, GraphSpace.class); + } + + public GraphSpace getGraphSpaceConfig(String graphSpace) { + String gs = this.metaDriver.get(this.graphSpaceConfKey(graphSpace)); + if (StringUtils.isEmpty(gs)) { + return null; + } + return JsonUtil.fromJson(gs, GraphSpace.class); + } + + public void addGraphSpaceConfig(String name, GraphSpace space) { + this.metaDriver.put(this.graphSpaceConfKey(name), + JsonUtil.toJson(space)); + } + + public void removeGraphSpaceConfig(String name) { + this.metaDriver.delete(this.graphSpaceConfKey(name)); + } + + public void updateGraphSpaceConfig(String name, GraphSpace space) { + this.metaDriver.put(this.graphSpaceConfKey(name), + JsonUtil.toJson(space)); + } + + public void appendGraphSpaceList(String name) { + String key = this.graphSpaceListKey(name); + this.metaDriver.put(key, name); + } + + public void clearGraphSpaceList(String name) { + String key = this.graphSpaceListKey(name); + this.metaDriver.delete(key); + } + + public void listenGraphSpaceAdd(Consumer consumer) { + this.listen(this.graphSpaceAddKey(), consumer); + } + + public void listenGraphSpaceRemove(Consumer consumer) { + this.listen(this.graphSpaceRemoveKey(), consumer); + } + + public 
void listenGraphSpaceUpdate(Consumer consumer) { + this.listen(this.graphSpaceUpdateKey(), consumer); + } + + public void notifyGraphSpaceAdd(String graphSpace) { + this.metaDriver.put(this.graphSpaceAddKey(), graphSpace); + } + + public void notifyGraphSpaceRemove(String graphSpace) { + this.metaDriver.put(this.graphSpaceRemoveKey(), graphSpace); + } + + public void notifyGraphSpaceUpdate(String graphSpace) { + this.metaDriver.put(this.graphSpaceUpdateKey(), graphSpace); + } + + private String graphSpaceConfPrefix() { + // HUGEGRAPH/{cluster}/GRAPHSPACE/CONF + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + META_PATH_CONF); + } + + private String graphSpaceAddKey() { + // HUGEGRAPH/{cluster}/EVENT/GRAPHSPACE/ADD + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_GRAPHSPACE, + META_PATH_ADD); + } + + private String graphSpaceRemoveKey() { + // HUGEGRAPH/{cluster}/EVENT/GRAPHSPACE/REMOVE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_GRAPHSPACE, + META_PATH_REMOVE); + } + + private String graphSpaceUpdateKey() { + // HUGEGRAPH/{cluster}/EVENT/GRAPHSPACE/UPDATE + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_EVENT, + META_PATH_GRAPHSPACE, + META_PATH_UPDATE); + } + + private String graphSpaceConfKey(String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/CONF/{graphspace} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + META_PATH_CONF, + name); + } + + private String graphSpaceListKey(String name) { + // HUGEGRAPH/{cluster}/GRAPHSPACE_LIST/{graphspace} + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE_LIST, + name); + } + + private String graphSpaceListKey() { + // HUGEGRAPH/{cluster}/GRAPHSPACE_LIST + return 
String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE_LIST); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java new file mode 100644 index 0000000000..3ab16eced5 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/meta/managers/TaskMetaManager.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.meta.managers; + +import static org.apache.hugegraph.meta.MetaManager.META_PATH_DELIMITER; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_GRAPHSPACE; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_HUGEGRAPH; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_TASK; +import static org.apache.hugegraph.meta.MetaManager.META_PATH_TASK_LOCK; + +import org.apache.hugegraph.meta.MetaDriver; +import org.apache.hugegraph.meta.lock.LockResult; + +public class TaskMetaManager extends AbstractMetaManager { + + private static final String TASK_STATUS_POSTFIX = "Status"; + private static final String TASK_PROGRESS_POSTFIX = "Progress"; + private static final String TASK_CONTEXT_POSTFIX = "Context"; + private static final String TASK_RETRY_POSTFIX = "Retry"; + + public TaskMetaManager(MetaDriver metaDriver, String cluster) { + super(metaDriver, cluster); + } + + public LockResult tryLockTask(String graphSpace, String graphName, + String taskId) { + String key = taskLockKey(graphSpace, graphName, taskId); + return this.tryLock(key); + } + + public boolean isLockedTask(String graphSpace, String graphName, + String taskId) { + + String key = taskLockKey(graphSpace, graphName, taskId); + // Check whether the current task is locked + return metaDriver.isLocked(key); + } + + public void unlockTask(String graphSpace, String graphName, + String taskId, LockResult lockResult) { + + String key = taskLockKey(graphSpace, graphName, taskId); + + this.unlock(key, lockResult); + } + + private String taskLockKey(String graphSpace, + String graphName, + String taskId) { + // HUGEGRAPH/{cluster}/GRAPHSPACE/{graphSpace}/{graphName}/TASK/{id}/TASK_LOCK + return String.join(META_PATH_DELIMITER, + META_PATH_HUGEGRAPH, + this.cluster, + META_PATH_GRAPHSPACE, + graphSpace, + graphName, + META_PATH_TASK, + taskId, + META_PATH_TASK_LOCK); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java index 79380a0d41..d027dd2f58 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/EdgeLabel.java @@ -20,14 +20,21 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.HugeGraph; import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.EdgeLabelType; import org.apache.hugegraph.type.define.Frequency; +import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.E; import com.google.common.base.Objects; @@ -39,6 +46,7 @@ public class EdgeLabel extends SchemaLabel { private Id targetLabel = NONE_ID; private Frequency frequency; private List sortKeys; + private EdgeLabelType edgeLabelType; public EdgeLabel(final HugeGraph graph, Id id, String name) { super(graph, id, name); @@ -55,6 +63,10 @@ public Frequency frequency() { return this.frequency; } + public void edgeLabelType(EdgeLabelType type) { + this.edgeLabelType = type; + } + public void frequency(Frequency frequency) { this.frequency = frequency; } @@ -164,4 +176,168 @@ public interface Builder extends SchemaBuilder { Builder userdata(Map userdata); } + + @Override + public Map asMap() { + Map map = new HashMap<>(); + + if (this.sourceLabel() != null && this.sourceLabel() != NONE_ID) { + map.put(P.SOURCE_LABEL, this.sourceLabel().asString()); + } + + if (this.targetLabel() != null && this.targetLabel() != NONE_ID) { + map.put(P.TARGET_LABEL, 
this.targetLabel().asString()); + } + + if (this.properties() != null) { + map.put(P.PROPERTIES, this.properties()); + } + + if (this.nullableKeys() != null) { + map.put(P.NULLABLE_KEYS, this.nullableKeys()); + } + + if (this.indexLabels() != null) { + map.put(P.INDEX_LABELS, this.indexLabels()); + } + + if (this.ttlStartTime() != null) { + map.put(P.TT_START_TIME, this.ttlStartTime().asString()); + } + + if (this.sortKeys() != null) { + map.put(P.SORT_KEYS, this.sortKeys); + } + + //map.put(P.EDGELABEL_TYPE, this.edgeLabelType); + //if (this.fatherId() != null) { + // map.put(P.FATHER_ID, this.fatherId().asString()); + //} + map.put(P.ENABLE_LABEL_INDEX, this.enableLabelIndex()); + map.put(P.TTL, String.valueOf(this.ttl())); + //map.put(P.LINKS, this.links()); + map.put(P.FREQUENCY, this.frequency().toString()); + + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static EdgeLabel fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(EdgeLabel.P.ID)); + String name = (String) map.get(EdgeLabel.P.NAME); + EdgeLabel edgeLabel = new EdgeLabel(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + edgeLabel.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + edgeLabel.userdata(new Userdata((Map) entry.getValue())); + break; + case P.PROPERTIES: + Set ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.properties(ids); + break; + case P.NULLABLE_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.nullableKeys(ids); + break; + case P.INDEX_LABELS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.addIndexLabels(ids.toArray(new Id[0])); + break; + case P.ENABLE_LABEL_INDEX: + boolean enableLabelIndex = (Boolean) 
entry.getValue(); + edgeLabel.enableLabelIndex(enableLabelIndex); + break; + case P.TTL: + long ttl = Long.parseLong((String) entry.getValue()); + edgeLabel.ttl(ttl); + break; + case P.TT_START_TIME: + long ttlStartTime = + Long.parseLong((String) entry.getValue()); + edgeLabel.ttlStartTime(IdGenerator.of(ttlStartTime)); + break; + //case P.LINKS: + // // TODO: serialize and deserialize + // List list = (List) entry.getValue(); + // for (Map m : list) { + // for (Object key : m.keySet()) { + // Id sid = IdGenerator.of(Long.parseLong((String) key)); + // Id tid = IdGenerator.of(Long.parseLong(String.valueOf(m.get(key)))); + // edgeLabel.links(Pair.of(sid, tid)); + // } + // } + // break; + case P.SOURCE_LABEL: + long sourceLabel = + Long.parseLong((String) entry.getValue()); + edgeLabel.sourceLabel(IdGenerator.of(sourceLabel)); + break; + case P.TARGET_LABEL: + long targetLabel = + Long.parseLong((String) entry.getValue()); + edgeLabel.targetLabel(IdGenerator.of(targetLabel)); + break; + //case P.FATHER_ID: + // long fatherId = + // Long.parseLong((String) entry.getValue()); + // edgeLabel.fatherId(IdGenerator.of(fatherId)); + // break; + //case P.EDGELABEL_TYPE: + // EdgeLabelType edgeLabelType = + // EdgeLabelType.valueOf( + // ((String) entry.getValue()).toUpperCase()); + // edgeLabel.edgeLabelType(edgeLabelType); + // break; + case P.FREQUENCY: + Frequency frequency = + Frequency.valueOf(((String) entry.getValue()).toUpperCase()); + edgeLabel.frequency(frequency); + break; + case P.SORT_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + edgeLabel.sortKeys(ids.toArray(new Id[0])); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for edge label", + entry.getKey())); + } + } + return edgeLabel; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static 
final String USERDATA = "userdata"; + + public static final String PROPERTIES = "properties"; + public static final String NULLABLE_KEYS = "nullableKeys"; + public static final String INDEX_LABELS = "indexLabels"; + + public static final String ENABLE_LABEL_INDEX = "enableLabelIndex"; + public static final String TTL = "ttl"; + public static final String TT_START_TIME = "ttlStartTime"; + public static final String LINKS = "links"; + public static final String SOURCE_LABEL = "sourceLabel"; + public static final String TARGET_LABEL = "targetLabel"; + public static final String EDGELABEL_TYPE = "edgeLabelType"; + public static final String FATHER_ID = "fatherId"; + public static final String FREQUENCY = "frequency"; + public static final String SORT_KEYS = "sortKeys"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java index 54c33b53c3..090cf6dffe 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/IndexLabel.java @@ -20,8 +20,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; @@ -29,6 +31,7 @@ import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.IndexType; +import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.util.E; import com.google.common.base.Objects; @@ -164,6 +167,8 @@ public Object validValue(Object value) { public static IndexLabel label(HugeType type) { switch (type) { + case TASK: + case SERVER: case VERTEX: return VL_IL; case 
EDGE: @@ -280,4 +285,74 @@ public interface Builder extends SchemaBuilder { Builder rebuild(boolean rebuild); } + @Override + public Map asMap() { + HashMap map = new HashMap<>(); + map.put(P.BASE_TYPE, this.baseType().name()); + map.put(P.BASE_VALUE, this.baseValue().asString()); + map.put(P.INDEX_TYPE, this.indexType().name()); + map.put(P.INDEX_FIELDS, this.indexFields()); + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static IndexLabel fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(IndexLabel.P.ID)); + String name = (String) map.get(IndexLabel.P.NAME); + + IndexLabel indexLabel = new IndexLabel(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + indexLabel.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + indexLabel.userdata(new Userdata((Map) entry.getValue())); + break; + case P.BASE_TYPE: + HugeType hugeType = + HugeType.valueOf(((String) entry.getValue()).toUpperCase()); + indexLabel.baseType(hugeType); + break; + case P.BASE_VALUE: + long sourceLabel = + Long.parseLong((String) entry.getValue()); + indexLabel.baseValue(IdGenerator.of(sourceLabel)); + break; + case P.INDEX_TYPE: + IndexType indexType = + IndexType.valueOf(((String) entry.getValue()).toUpperCase()); + indexLabel.indexType(indexType); + break; + case P.INDEX_FIELDS: + List ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toList()); + indexLabel.indexFields(ids.toArray(new Id[0])); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for index label", + entry.getKey())); + } + } + return indexLabel; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + + 
public static final String BASE_TYPE = "baseType"; + public static final String BASE_VALUE = "baseValue"; + public static final String INDEX_TYPE = "indexType"; + public static final String INDEX_FIELDS = "indexFields"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java index f86bec0aec..59c1e73a4c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/PropertyKey.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; @@ -28,6 +29,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; +import org.apache.hugegraph.backend.id.IdGenerator; import org.apache.hugegraph.exception.NotSupportException; import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; @@ -35,6 +37,7 @@ import org.apache.hugegraph.type.define.AggregateType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.DataType; +import org.apache.hugegraph.type.define.SchemaStatus; import org.apache.hugegraph.type.define.WriteType; import org.apache.hugegraph.util.E; import org.apache.hugegraph.util.LongEncoding; @@ -409,4 +412,85 @@ public interface Builder extends SchemaBuilder { Builder userdata(Map userdata); } + + @Override + public Map asMap() { + Map map = new HashMap<>(); + + if (this.dataType != null) { + map.put(P.DATA_TYPE, this.dataType.string()); + } + + if (this.cardinality != null) { + map.put(P.CARDINALITY, this.cardinality.string()); + } + + if (this.aggregateType != null) { + map.put(P.AGGREGATE_TYPE, 
this.aggregateType.string()); + } + + if (this.writeType != null) { + map.put(P.WRITE_TYPE, this.writeType.string()); + } + + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static PropertyKey fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(P.ID)); + String name = (String) map.get(P.NAME); + + PropertyKey propertyKey = new PropertyKey(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + propertyKey.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + propertyKey.userdata(new Userdata((Map) entry.getValue())); + break; + case P.AGGREGATE_TYPE: + propertyKey.aggregateType( + AggregateType.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.WRITE_TYPE: + propertyKey.writeType( + WriteType.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.DATA_TYPE: + propertyKey.dataType( + DataType.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.CARDINALITY: + propertyKey.cardinality( + Cardinality.valueOf(((String) entry.getValue()).toUpperCase())); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for property key", + entry.getKey())); + } + } + return propertyKey; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + + public static final String DATA_TYPE = "data_type"; + public static final String CARDINALITY = "cardinality"; + + public static final String AGGREGATE_TYPE = "aggregate_type"; + public static final String WRITE_TYPE = "write_type"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java 
b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java index 1e42767a0c..05d91bec3d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaElement.java @@ -47,6 +47,11 @@ public abstract class SchemaElement implements Nameable, Typeable, protected static final int ILN_IL_ID = -6; protected static final int OLAP_VL_ID = -7; + // OLAP_ID means all of vertex label ids + public static final Id OLAP_ID = IdGenerator.of(-7); + // OLAP means all of vertex label names + public static final String OLAP = "~olap"; + public static final Id NONE_ID = IdGenerator.ZERO; public static final String UNDEF = "~undefined"; @@ -217,4 +222,31 @@ public Id task() { return this.task; } } + + public abstract Map asMap(); + + public Map asMap(Map map) { + E.checkState(this.id != null, + "Property key id can't be null"); + E.checkState(this.name != null, + "Property key name can't be null"); + E.checkState(this.status != null, + "Property status can't be null"); + + map.put(P.ID, this.id); + map.put(P.NAME, this.name); + map.put(P.STATUS, this.status.string()); + map.put(P.USERDATA, this.userdata); + + return map; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java index ec55a823f9..0b31ab9698 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/SchemaManager.java @@ -20,6 +20,7 @@ import java.util.List; import 
java.util.stream.Collectors; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.tinkerpop.gremlin.structure.Graph; @@ -34,10 +35,10 @@ public class SchemaManager { - private final SchemaTransaction transaction; + private final ISchemaTransaction transaction; private HugeGraph graph; - public SchemaManager(SchemaTransaction transaction, HugeGraph graph) { + public SchemaManager(ISchemaTransaction transaction, HugeGraph graph) { E.checkNotNull(transaction, "transaction"); E.checkNotNull(graph, "graph"); this.transaction = transaction; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java index 2b3b8e9bf1..487323af10 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/VertexLabel.java @@ -20,8 +20,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; @@ -29,6 +32,8 @@ import org.apache.hugegraph.schema.builder.SchemaBuilder; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.IdStrategy; +import org.apache.hugegraph.type.define.SchemaStatus; + import com.google.common.base.Objects; public class VertexLabel extends SchemaLabel { @@ -132,4 +137,111 @@ public interface Builder extends SchemaBuilder { Builder userdata(Map userdata); } + + @Override + public Map asMap() { + HashMap map = new HashMap(); + + map.put(P.PROPERTIES, this.properties()); + + map.put(P.NULLABLE_KEYS, this.nullableKeys()); + + map.put(P.INDEX_LABELS, 
this.indexLabels()); + + map.put(P.ENABLE_LABEL_INDEX, this.enableLabelIndex()); + + map.put(P.TTL, String.valueOf(this.ttl())); + + map.put(P.TT_START_TIME, this.ttlStartTime().asString()); + + map.put(P.ID_STRATEGY, this.idStrategy().string()); + + map.put(P.PRIMARY_KEYS, this.primaryKeys()); + + return super.asMap(map); + } + + @SuppressWarnings("unchecked") + public static VertexLabel fromMap(Map map, HugeGraph graph) { + Id id = IdGenerator.of((int) map.get(VertexLabel.P.ID)); + String name = (String) map.get(VertexLabel.P.NAME); + + VertexLabel vertexLabel = new VertexLabel(graph, id, name); + for (Map.Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case P.ID: + case P.NAME: + break; + case P.STATUS: + vertexLabel.status( + SchemaStatus.valueOf(((String) entry.getValue()).toUpperCase())); + break; + case P.USERDATA: + vertexLabel.userdata(new Userdata((Map) entry.getValue())); + break; + case P.PROPERTIES: + Set ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.properties(ids); + break; + case P.NULLABLE_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.nullableKeys(ids); + break; + case P.INDEX_LABELS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.addIndexLabels(ids.toArray(new Id[0])); + break; + case P.ENABLE_LABEL_INDEX: + boolean enableLabelIndex = (Boolean) entry.getValue(); + vertexLabel.enableLabelIndex(enableLabelIndex); + break; + case P.TTL: + long ttl = Long.parseLong((String) entry.getValue()); + vertexLabel.ttl(ttl); + break; + case P.TT_START_TIME: + long ttlStartTime = + Long.parseLong((String) entry.getValue()); + vertexLabel.ttlStartTime(IdGenerator.of(ttlStartTime)); + break; + case P.ID_STRATEGY: + IdStrategy idStrategy = + IdStrategy.valueOf(((String) entry.getValue()).toUpperCase()); + vertexLabel.idStrategy(idStrategy); + break; + case 
P.PRIMARY_KEYS: + ids = ((List) entry.getValue()).stream().map( + IdGenerator::of).collect(Collectors.toSet()); + vertexLabel.primaryKeys(ids.toArray(new Id[0])); + break; + default: + throw new AssertionError(String.format( + "Invalid key '%s' for vertex label", + entry.getKey())); + } + } + return vertexLabel; + } + + public static final class P { + + public static final String ID = "id"; + public static final String NAME = "name"; + + public static final String STATUS = "status"; + public static final String USERDATA = "userdata"; + + public static final String PROPERTIES = "properties"; + public static final String NULLABLE_KEYS = "nullableKeys"; + public static final String INDEX_LABELS = "indexLabels"; + + public static final String ENABLE_LABEL_INDEX = "enableLabelIndex"; + public static final String TTL = "ttl"; + public static final String TT_START_TIME = "ttlStartTime"; + public static final String ID_STRATEGY = "idStrategy"; + public static final String PRIMARY_KEYS = "primaryKeys"; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java index 7c1620fda7..3e2859f236 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/AbstractBuilder.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.IndexLabel; import org.apache.hugegraph.schema.PropertyKey; @@ -38,10 +39,10 @@ public abstract class AbstractBuilder { - private final SchemaTransaction transaction; + private final ISchemaTransaction transaction; private final HugeGraph 
graph; - public AbstractBuilder(SchemaTransaction transaction, HugeGraph graph) { + public AbstractBuilder(ISchemaTransaction transaction, HugeGraph graph) { E.checkNotNull(transaction, "transaction"); E.checkNotNull(graph, "graph"); this.transaction = transaction; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java index 87ec1d78bf..a5ff1e8ae1 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/EdgeLabelBuilder.java @@ -29,6 +29,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.Userdata; @@ -62,7 +63,7 @@ public class EdgeLabelBuilder extends AbstractBuilder private Userdata userdata; private boolean checkExist; - public EdgeLabelBuilder(SchemaTransaction transaction, + public EdgeLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -81,7 +82,7 @@ public EdgeLabelBuilder(SchemaTransaction transaction, this.checkExist = true; } - public EdgeLabelBuilder(SchemaTransaction transaction, + public EdgeLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, EdgeLabel copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java index 8ab7197b5a..1484b05dcf 100644 --- 
a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/IndexLabelBuilder.java @@ -27,6 +27,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.HugeException; import org.apache.hugegraph.HugeGraph; @@ -64,7 +65,7 @@ public class IndexLabelBuilder extends AbstractBuilder private boolean checkExist; private boolean rebuild; - public IndexLabelBuilder(SchemaTransaction transaction, + public IndexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -79,7 +80,7 @@ public IndexLabelBuilder(SchemaTransaction transaction, this.rebuild = true; } - public IndexLabelBuilder(SchemaTransaction transaction, + public IndexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, IndexLabel copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java index 63a3371f65..54684a3e34 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/PropertyKeyBuilder.java @@ -22,6 +22,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.SchemaElement; @@ -53,7 
+54,7 @@ public class PropertyKeyBuilder extends AbstractBuilder private boolean checkExist; private Userdata userdata; - public PropertyKeyBuilder(SchemaTransaction transaction, + public PropertyKeyBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -67,7 +68,7 @@ public PropertyKeyBuilder(SchemaTransaction transaction, this.checkExist = true; } - public PropertyKeyBuilder(SchemaTransaction transaction, + public PropertyKeyBuilder(ISchemaTransaction transaction, HugeGraph graph, PropertyKey copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java index c59933f365..42115d8d7c 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/schema/builder/VertexLabelBuilder.java @@ -29,6 +29,7 @@ import org.apache.hugegraph.backend.id.Id; import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.tx.ISchemaTransaction; import org.apache.hugegraph.backend.tx.SchemaTransaction; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.Userdata; @@ -59,7 +60,7 @@ public class VertexLabelBuilder extends AbstractBuilder private Userdata userdata; private boolean checkExist; - public VertexLabelBuilder(SchemaTransaction transaction, + public VertexLabelBuilder(ISchemaTransaction transaction, HugeGraph graph, String name) { super(transaction, graph); E.checkNotNull(name, "name"); @@ -76,7 +77,7 @@ public VertexLabelBuilder(SchemaTransaction transaction, this.checkExist = true; } - public VertexLabelBuilder(SchemaTransaction transaction, + public VertexLabelBuilder(ISchemaTransaction 
transaction, HugeGraph graph, VertexLabel copy) { super(transaction, graph); E.checkNotNull(copy, "copy"); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java new file mode 100644 index 0000000000..047bd65f53 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/GraphSpace.java @@ -0,0 +1,512 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.space; + +import java.util.Date; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.hugegraph.HugeException; +import org.apache.hugegraph.util.E; + +public class GraphSpace { + + public static final String DEFAULT_GRAPH_SPACE_SERVICE_NAME = "DEFAULT"; + public static final String DEFAULT_NICKNAME = "默认图空间"; + public static final String DEFAULT_GRAPH_SPACE_DESCRIPTION = + "The system default graph space"; + public static final String DEFAULT_CREATOR_NAME = "anonymous"; + + public static final int DEFAULT_CPU_LIMIT = 4; + public static final int DEFAULT_MEMORY_LIMIT = 8; + public static final int DEFAULT_STORAGE_LIMIT = 100; + + public static final int DEFAULT_MAX_GRAPH_NUMBER = 100; + public static final int DEFAULT_MAX_ROLE_NUMBER = 100; + private final String creator; + public int storageLimit; // GB + public String oltpNamespace; + private String name; + private String nickname; + private String description; + private int cpuLimit; + private int memoryLimit; // GB + private int computeCpuLimit; + private int computeMemoryLimit; // GB + private String olapNamespace; + private String storageNamespace; + private int maxGraphNumber; + private int maxRoleNumber; + private Boolean auth; + private Map configs; + private int cpuUsed; + private int memoryUsed; // GB + private int storageUsed; // GB + private int graphNumberUsed; + private int roleNumberUsed; + private String operatorImagePath = ""; // path of compute operator image + private String internalAlgorithmImageUrl = ""; + private Date createTime; + private Date updateTime; + + + public GraphSpace(String name) { + E.checkArgument(name != null && !StringUtils.isEmpty(name), + "The name of graph space can't be null or empty"); + this.name = name; + this.nickname = DEFAULT_NICKNAME; + + this.maxGraphNumber = DEFAULT_MAX_GRAPH_NUMBER; + this.maxRoleNumber = DEFAULT_MAX_ROLE_NUMBER; + 
+ this.cpuLimit = DEFAULT_CPU_LIMIT; + this.memoryLimit = DEFAULT_MEMORY_LIMIT; + this.storageLimit = DEFAULT_STORAGE_LIMIT; + + this.computeCpuLimit = DEFAULT_CPU_LIMIT; + this.computeMemoryLimit = DEFAULT_MEMORY_LIMIT; + + this.auth = false; + this.creator = DEFAULT_CREATOR_NAME; + this.configs = new HashMap<>(); + } + + public GraphSpace(String name, String nickname, String description, + int cpuLimit, + int memoryLimit, int storageLimit, int maxGraphNumber, + int maxRoleNumber, boolean auth, String creator, + Map config) { + E.checkArgument(name != null && !StringUtils.isEmpty(name), + "The name of graph space can't be null or empty"); + E.checkArgument(cpuLimit > 0, "The cpu limit must > 0"); + E.checkArgument(memoryLimit > 0, "The memory limit must > 0"); + E.checkArgument(storageLimit > 0, "The storage limit must > 0"); + E.checkArgument(maxGraphNumber > 0, "The max graph number must > 0"); + this.name = name; + this.nickname = nickname; + this.description = description; + this.cpuLimit = cpuLimit; + this.memoryLimit = memoryLimit; + this.storageLimit = storageLimit; + this.maxGraphNumber = maxGraphNumber; + this.maxRoleNumber = maxRoleNumber; + + this.auth = auth; + if (config == null) { + this.configs = new HashMap<>(); + } else { + this.configs = config; + } + + this.createTime = new Date(); + this.updateTime = this.createTime; + this.creator = creator; + } + + public GraphSpace(String name, String nickname, String description, + int cpuLimit, + int memoryLimit, int storageLimit, int maxGraphNumber, + int maxRoleNumber, String oltpNamespace, + String olapNamespace, String storageNamespace, + int cpuUsed, int memoryUsed, int storageUsed, + int graphNumberUsed, int roleNumberUsed, + boolean auth, String creator, Map config) { + E.checkArgument(name != null && !StringUtils.isEmpty(name), + "The name of graph space can't be null or empty"); + E.checkArgument(cpuLimit > 0, "The cpu limit must > 0"); + E.checkArgument(memoryLimit > 0, "The memory limit must > 
0"); + E.checkArgument(storageLimit > 0, "The storage limit must > 0"); + E.checkArgument(maxGraphNumber > 0, "The max graph number must > 0"); + this.name = name; + this.nickname = nickname; + this.description = description; + + this.cpuLimit = cpuLimit; + this.memoryLimit = memoryLimit; + this.storageLimit = storageLimit; + + this.maxGraphNumber = maxGraphNumber; + this.maxRoleNumber = maxRoleNumber; + + this.oltpNamespace = oltpNamespace; + this.olapNamespace = olapNamespace; + this.storageNamespace = storageNamespace; + + this.cpuUsed = cpuUsed; + this.memoryUsed = memoryUsed; + this.storageUsed = storageUsed; + + this.graphNumberUsed = graphNumberUsed; + this.roleNumberUsed = roleNumberUsed; + + this.auth = auth; + this.creator = creator; + + this.configs = new HashMap<>(); + if (config != null) { + this.configs = config; + } + } + + public String name() { + return this.name; + } + + public void name(String name) { + this.name = name; + } + + public String nickname() { + return this.nickname; + } + + public void nickname(String nickname) { + this.nickname = nickname; + } + + public String description() { + return this.description; + } + + public void description(String description) { + this.description = description; + } + + public int cpuLimit() { + return this.cpuLimit; + } + + public void cpuLimit(int cpuLimit) { + E.checkArgument(cpuLimit > 0, + "The cpu limit must be > 0, but got: %s", cpuLimit); + this.cpuLimit = cpuLimit; + } + + public int memoryLimit() { + return this.memoryLimit; + } + + public void memoryLimit(int memoryLimit) { + E.checkArgument(memoryLimit > 0, + "The memory limit must be > 0, but got: %s", + memoryLimit); + this.memoryLimit = memoryLimit; + } + + public int storageLimit() { + return this.storageLimit; + } + + public void storageLimit(int storageLimit) { + E.checkArgument(storageLimit > 0, + "The storage limit must be > 0, but got: %s", + storageLimit); + this.storageLimit = storageLimit; + } + + public void setStorageUsed(int 
storageUsed) { + this.storageUsed = storageUsed; + } + + public int computeCpuLimit() { + return this.computeCpuLimit; + } + + public void computeCpuLimit(int computeCpuLimit) { + E.checkArgument(computeCpuLimit >= 0, + "The compute cpu limit must be >= 0, but got: %s", computeCpuLimit); + this.computeCpuLimit = computeCpuLimit; + } + + public int computeMemoryLimit() { + return this.computeMemoryLimit; + } + + public void computeMemoryLimit(int computeMemoryLimit) { + E.checkArgument(computeMemoryLimit >= 0, + "The compute memory limit must be >= 0, but got: %s", + computeMemoryLimit); + this.computeMemoryLimit = computeMemoryLimit; + } + + public String oltpNamespace() { + return this.oltpNamespace; + } + + public void oltpNamespace(String oltpNamespace) { + this.oltpNamespace = oltpNamespace; + } + + public String olapNamespace() { + return this.olapNamespace; + } + + public void olapNamespace(String olapNamespace) { + this.olapNamespace = olapNamespace; + } + + public String storageNamespace() { + return this.storageNamespace; + } + + public void storageNamespace(String storageNamespace) { + this.storageNamespace = storageNamespace; + } + + public int maxGraphNumber() { + return this.maxGraphNumber; + } + + public void maxGraphNumber(int maxGraphNumber) { + this.maxGraphNumber = maxGraphNumber; + } + + public int maxRoleNumber() { + return this.maxRoleNumber; + } + + public void maxRoleNumber(int maxRoleNumber) { + this.maxRoleNumber = maxRoleNumber; + } + + public int graphNumberUsed() { + return this.graphNumberUsed; + } + + public void graphNumberUsed(int graphNumberUsed) { + this.graphNumberUsed = graphNumberUsed; + } + + public int roleNumberUsed() { + return this.roleNumberUsed; + } + + public void roleNumberUsed(int roleNumberUsed) { + this.roleNumberUsed = roleNumberUsed; + } + + public boolean auth() { + return this.auth; + } + + public void auth(boolean auth) { + this.auth = auth; + } + + public Map configs() { + return this.configs; + } + + public 
void configs(Map configs) { + this.configs.putAll(configs); + } + + public void operatorImagePath(String path) { + this.operatorImagePath = path; + } + + public String operatorImagePath() { + return this.operatorImagePath; + } + + public void internalAlgorithmImageUrl(String url) { + if (StringUtils.isNotBlank(url)) { + this.internalAlgorithmImageUrl = url; + } + } + + public String internalAlgorithmImageUrl() { + return this.internalAlgorithmImageUrl; + } + + public Date createTime() { + return this.createTime; + } + + public Date updateTime() { + return this.updateTime; + } + + public String creator() { + return this.creator; + } + + public void updateTime(Date update) { + this.updateTime = update; + } + + public void createTime(Date create) { + this.createTime = create; + } + + public void refreshUpdate() { + this.updateTime = new Date(); + } + + public Map info() { + Map infos = new LinkedHashMap<>(); + infos.put("name", this.name); + infos.put("nickname", this.nickname); + infos.put("description", this.description); + + infos.put("cpu_limit", this.cpuLimit); + infos.put("memory_limit", this.memoryLimit); + infos.put("storage_limit", this.storageLimit); + + infos.put("compute_cpu_limit", this.computeCpuLimit); + infos.put("compute_memory_limit", this.computeMemoryLimit); + + infos.put("oltp_namespace", this.oltpNamespace); + infos.put("olap_namespace", this.olapNamespace); + infos.put("storage_namespace", this.storageNamespace); + + infos.put("max_graph_number", this.maxGraphNumber); + infos.put("max_role_number", this.maxRoleNumber); + + infos.putAll(this.configs); + // sources used info is not automatically updated, it could be + // updated by pdClient of GraphManager + infos.put("cpu_used", this.cpuUsed); + infos.put("memory_used", this.memoryUsed); + infos.put("storage_used", this.storageUsed); + float storageUserPercent = Float.parseFloat( + String.format("%.2f", (float) this.storageUsed / + ((float) this.storageLimit * 1.0))); + 
infos.put("storage_percent", storageUserPercent); + infos.put("graph_number_used", this.graphNumberUsed); + infos.put("role_number_used", this.roleNumberUsed); + + infos.put("auth", this.auth); + + infos.put("operator_image_path", this.operatorImagePath); + infos.put("internal_algorithm_image_url", this.internalAlgorithmImageUrl); + + infos.put("create_time", this.createTime); + infos.put("update_time", this.updateTime); + infos.put("creator", this.creator); + return infos; + } + + private synchronized void incrCpuUsed(int acquiredCount) { + if (acquiredCount < 0) { + throw new HugeException("cannot increase cpu used since acquired count is negative"); + } + this.cpuUsed += acquiredCount; + } + + private synchronized void decrCpuUsed(int releasedCount) { + if (releasedCount < 0) { + throw new HugeException("cannot decrease cpu used since released count is negative"); + } + if (cpuUsed < releasedCount) { + cpuUsed = 0; + } else { + this.cpuUsed -= releasedCount; + } + } + + private synchronized void incrMemoryUsed(int acquiredCount) { + if (acquiredCount < 0) { + throw new HugeException("cannot increase memory used since acquired count is negative"); + } + this.memoryUsed += acquiredCount; + } + + private synchronized void decrMemoryUsed(int releasedCount) { + if (releasedCount < 0) { + throw new HugeException("cannot decrease memory used since released count is negative"); + } + if (memoryUsed < releasedCount) { + this.memoryUsed = 0; + } else { + this.memoryUsed -= releasedCount; + } + } + + /** + * Only limit the resource usage for oltp service under k8s + * + * @param service + * @return + */ + public boolean tryOfferResourceFor(Service service) { + if (!service.k8s()) { + return true; + } + int count = service.count(); + int leftCpu = this.cpuLimit - this.cpuUsed; + int leftMemory = this.memoryLimit - this.memoryUsed; + int acquiredCpu = service.cpuLimit() * count; + int acquiredMemory = service.memoryLimit() * count; + if (acquiredCpu > leftCpu || + 
acquiredMemory > leftMemory) { + return false; + } + this.incrCpuUsed(acquiredCpu); + this.incrMemoryUsed(acquiredMemory); + return true; + } + + public void recycleResourceFor(Service service) { + int count = service.count(); + this.decrCpuUsed(service.cpuLimit() * count); + this.decrMemoryUsed(service.memoryLimit() * count); + } + + public boolean tryOfferGraph() { + return this.tryOfferGraph(1); + } + + public boolean tryOfferGraph(int count) { + if (this.graphNumberUsed + count > this.maxGraphNumber) { + return false; + } + this.graphNumberUsed += count; + return true; + } + + public void recycleGraph() { + this.recycleGraph(1); + } + + public void recycleGraph(int count) { + this.graphNumberUsed -= count; + } + + public boolean tryOfferRole() { + return this.tryOfferRole(1); + } + + public boolean tryOfferRole(int count) { + if (this.roleNumberUsed + count > this.maxRoleNumber) { + return false; + } + this.roleNumberUsed += count; + return true; + } + + public void recycleRole() { + this.recycleRole(1); + } + + public void recycleRole(int count) { + this.roleNumberUsed -= count; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/SchemaTemplate.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/SchemaTemplate.java new file mode 100644 index 0000000000..d0f5d6c3d4 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/SchemaTemplate.java @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.space; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Map; + +import org.apache.hugegraph.util.E; + +import com.google.common.collect.ImmutableMap; + +public class SchemaTemplate { + + public static SimpleDateFormat FORMATTER = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + protected Date createTime; + protected Date updateTime; + protected String creator; + private final String name; + private String schema; + + public SchemaTemplate(String name, String schema) { + E.checkArgument(name != null && !name.isEmpty(), + "The name of schema template can't be null or empty"); + E.checkArgument(schema != null && !schema.isEmpty(), + "The schema template can't be null or empty"); + this.name = name; + this.schema = schema; + this.createTime = new Date(); + this.updateTime = createTime; + } + + public SchemaTemplate(String name, String schema, Date create, String creator) { + E.checkArgument(name != null && !name.isEmpty(), + "The name of schema template can't be null or empty"); + E.checkArgument(schema != null && !schema.isEmpty(), + "The schema template can't be null or empty"); + this.name = name; + this.schema = schema; + this.createTime = create; + this.updateTime = createTime; + + this.creator = creator; + } + + public static SchemaTemplate fromMap(Map map) { + try { + SchemaTemplate template = new SchemaTemplate(map.get("name"), + map.get("schema"), + FORMATTER.parse(map.get("create")), + map.get("creator")); + + 
template.updateTime(FORMATTER.parse(map.get("update"))); + return template; + + } catch (ParseException e) { + e.printStackTrace(); + } + + return null; + } + + public String name() { + return this.name; + } + + public String schema() { + return this.schema; + } + + public void schema(String schema) { + this.schema = schema; + } + + public Date create() { + return this.createTime; + } + + public Date createTime() { + return this.createTime; + } + + public Date update() { + return this.updateTime; + } + + public Date updateTime() { + return this.updateTime; + } + + public void create(Date create) { + this.createTime = create; + } + + public String creator() { + return this.creator; + } + + public void creator(String creator) { + this.creator = creator; + } + + public void updateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public void refreshUpdateTime() { + this.updateTime = new Date(); + } + + public Map asMap() { + String createStr = FORMATTER.format(this.createTime); + String updateStr = FORMATTER.format(this.updateTime); + return new ImmutableMap.Builder() + .put("name", this.name) + .put("schema", this.schema) + .put("create", createStr) + .put("create_time", createStr) + .put("update", updateStr) + .put("update_time", updateStr) + .put("creator", this.creator) + .build(); + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/Service.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/Service.java new file mode 100644 index 0000000000..bfd3fe6b14 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/space/Service.java @@ -0,0 +1,361 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.space; + +import java.util.Date; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang.StringUtils; +import org.apache.hugegraph.util.E; + +public class Service { + + public static final int DEFAULT_COUNT = 1; + public static final String DEFAULT_ROUTE_TYPE = "NodePort"; + public static final int DEFAULT_PORT = 0; + + public static final int DEFAULT_CPU_LIMIT = 4; + public static final int DEFAULT_MEMORY_LIMIT = 8; + public static final int DEFAULT_STORAGE_LIMIT = 100; + private final String creator; + private String name; + private ServiceType type; + private DeploymentType deploymentType; + private String description; + private Status status; + private int count; + private int running; + private int cpuLimit; + private int memoryLimit; // GB + private int storageLimit; // GB + private String routeType; + private int port; + private Set urls = new HashSet<>(); + private Set serverDdsUrls = new HashSet<>(); + private Set serverNodePortUrls = new HashSet<>(); + private String serviceId; + private String pdServiceId; + private Date createTime; + private Date updateTime; + + public Service(String name, String creator, ServiceType type, + DeploymentType deploymentType) { + E.checkArgument(name != null && !StringUtils.isEmpty(name), + "The name of service can't be null or 
empty"); + E.checkArgumentNotNull(type, "The type of service can't be null"); + E.checkArgumentNotNull(deploymentType, + "The deployment type of service can't be null"); + this.name = name; + this.type = type; + this.deploymentType = deploymentType; + this.status = Status.UNKNOWN; + this.count = DEFAULT_COUNT; + this.running = 0; + this.routeType = DEFAULT_ROUTE_TYPE; + this.port = DEFAULT_PORT; + this.cpuLimit = DEFAULT_CPU_LIMIT; + this.memoryLimit = DEFAULT_MEMORY_LIMIT; + this.storageLimit = DEFAULT_STORAGE_LIMIT; + + this.creator = creator; + this.createTime = new Date(); + this.updateTime = this.createTime; + } + + public Service(String name, String creator, String description, ServiceType type, + DeploymentType deploymentType, int count, int running, + int cpuLimit, int memoryLimit, int storageLimit, + String routeType, int port, Set urls) { + E.checkArgument(name != null && !StringUtils.isEmpty(name), + "The name of service can't be null or empty"); + E.checkArgumentNotNull(type, "The type of service can't be null"); + this.name = name; + this.description = description; + this.type = type; + this.status = Status.UNKNOWN; + this.deploymentType = deploymentType; + this.count = count; + this.running = running; + this.cpuLimit = cpuLimit; + this.memoryLimit = memoryLimit; + this.storageLimit = storageLimit; + this.routeType = routeType; + this.port = port; + this.urls = urls; + + this.creator = creator; + this.createTime = new Date(); + this.updateTime = this.createTime; + } + + public String name() { + return this.name; + } + + public String description() { + return this.description; + } + + public void description(String description) { + this.description = description; + } + + public ServiceType type() { + return this.type; + } + + public void type(ServiceType type) { + this.type = type; + } + + public DeploymentType deploymentType() { + return this.deploymentType; + } + + public void deploymentType(DeploymentType deploymentType) { + this.deploymentType = 
deploymentType; + } + + public Status status() { + return this.status; + } + + public void status(Status status) { + this.status = status; + } + + public int count() { + return this.count; + } + + public void count(int count) { + E.checkArgument(count > 0, + "The service count must be > 0, but got: %s", count); + this.count = count; + } + + public int running() { + return this.running; + } + + public void running(int running) { + E.checkArgument(running <= this.count, + "The running count must be < count %s, but got: %s", + this.count, running); + this.running = running; + } + + public int cpuLimit() { + return this.cpuLimit; + } + + public void cpuLimit(int cpuLimit) { + E.checkArgument(cpuLimit > 0, + "The cpu limit must be > 0, but got: %s", cpuLimit); + this.cpuLimit = cpuLimit; + } + + public int memoryLimit() { + return this.memoryLimit; + } + + public void memoryLimit(int memoryLimit) { + E.checkArgument(memoryLimit > 0, + "The memory limit must be > 0, but got: %s", + memoryLimit); + this.memoryLimit = memoryLimit; + } + + public int storageLimit() { + return this.storageLimit; + } + + public void storageLimit(int storageLimit) { + E.checkArgument(storageLimit > 0, + "The storage limit must be > 0, but got: %s", + storageLimit); + this.storageLimit = storageLimit; + } + + public String routeType() { + return this.routeType; + } + + public void routeType(String routeType) { + this.routeType = routeType; + } + + public int port() { + return this.port; + } + + public void port(int port) { + this.port = port; + } + + public Set urls() { + if (this.urls == null) { + this.urls = new HashSet<>(); + } + return this.urls; + } + + public void urls(Set urls) { + this.urls = urls; + } + + public Set serverDdsUrls() { + if (this.serverDdsUrls == null) { + this.serverDdsUrls = new HashSet<>(); + } + return this.serverDdsUrls; + } + + public void serverDdsUrls(Set urls) { + this.serverDdsUrls = urls; + } + + public Set serverNodePortUrls() { + if (this.serverNodePortUrls 
== null) { + this.serverNodePortUrls = new HashSet<>(); + } + return this.serverNodePortUrls; + } + + public void serverNodePortUrls(Set urls) { + this.serverNodePortUrls = urls; + } + + public void url(String url) { + if (this.urls == null) { + this.urls = new HashSet<>(); + } + this.urls.add(url); + } + + public boolean manual() { + return DeploymentType.MANUAL.equals(this.deploymentType); + } + + public boolean k8s() { + return DeploymentType.K8S.equals(this.deploymentType); + } + + public String creator() { + return this.creator; + } + + public Date createdTime() { + return this.createTime; + } + + public Date updateTime() { + return this.updateTime; + } + + public void createTime(Date create) { + this.createTime = create; + } + + public void updateTime(Date update) { + this.updateTime = update; + } + + public void refreshUpdate() { + this.updateTime = new Date(); + } + + public boolean sameService(Service other) { + if (other.deploymentType == DeploymentType.K8S || + this.deploymentType == DeploymentType.K8S) { + return true; + } + return (this.name.equals(other.name) && + this.type.equals(other.type) && + this.deploymentType == other.deploymentType && + this.urls.equals(other.urls) && + this.port == other.port); + } + + public Map info() { + Map infos = new LinkedHashMap<>(); + infos.put("name", this.name); + infos.put("type", this.type); + infos.put("deployment_type", this.deploymentType); + infos.put("description", this.description); + infos.put("status", this.status); + infos.put("count", this.count); + infos.put("running", this.running); + + infos.put("cpu_limit", this.cpuLimit); + infos.put("memory_limit", this.memoryLimit); + infos.put("storage_limit", this.storageLimit); + + infos.put("route_type", this.routeType); + infos.put("port", this.port); + infos.put("urls", this.urls); + infos.put("server_dds_urls", this.serverDdsUrls); + infos.put("server_node_port_urls", this.serverNodePortUrls); + + infos.put("service_id", this.serviceId); + 
infos.put("pd_service_id", this.pdServiceId); + + infos.put("creator", this.creator); + infos.put("create_time", this.createTime); + infos.put("update_time", this.updateTime); + + return infos; + } + + public String serviceId() { + return this.serviceId; + } + + public void serviceId(String serviceId) { + this.serviceId = serviceId; + } + + public String pdServiceId() { + return this.pdServiceId; + } + + public void pdServiceId(String serviceId) { + this.pdServiceId = serviceId; + } + + public enum DeploymentType { + MANUAL, + K8S, + } + + public enum ServiceType { + OLTP, + OLAP, + STORAGE + } + + public enum Status { + UNKNOWN, + STARTING, + RUNNING, + STOPPED + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java index d08c4c21a3..275a8c54fa 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeEdge.java @@ -540,4 +540,69 @@ public static HugeEdge constructEdge(HugeVertex ownerVertex, return edge; } + + public static HugeEdge constructEdgeWithoutLabel(HugeVertex ownerVertex, + boolean isOutEdge, + String sortValues, + Id otherVertexId) { + HugeGraph graph = ownerVertex.graph(); + HugeVertex otherVertex = new HugeVertex(graph, otherVertexId, + VertexLabel.NONE); + ownerVertex.propNotLoaded(); + otherVertex.propNotLoaded(); + + HugeEdge edge = new HugeEdge(graph, null, EdgeLabel.NONE); + edge.name(sortValues); + edge.vertices(isOutEdge, ownerVertex, otherVertex); + edge.assignId(); + + if (isOutEdge) { + ownerVertex.addOutEdge(edge); + otherVertex.addInEdge(edge.switchOwner()); + } else { + ownerVertex.addInEdge(edge); + otherVertex.addOutEdge(edge.switchOwner()); + } + + return edge; + } + + public static HugeEdge constructEdgeWithoutGraph(HugeVertex ownerVertex, + boolean 
isOutEdge, + EdgeLabel edgeLabel, + String sortValues, + Id otherVertexId) { + Id ownerLabelId = edgeLabel.sourceLabel(); + Id otherLabelId = edgeLabel.targetLabel(); + VertexLabel srcLabel = new VertexLabel(null, ownerLabelId, "UNDEF"); + VertexLabel tgtLabel = new VertexLabel(null, otherLabelId, "UNDEF"); + + VertexLabel otherVertexLabel; + if (isOutEdge) { + ownerVertex.correctVertexLabel(srcLabel); + otherVertexLabel = tgtLabel; + } else { + ownerVertex.correctVertexLabel(tgtLabel); + otherVertexLabel = srcLabel; + } + HugeVertex otherVertex = new HugeVertex(null, otherVertexId, + otherVertexLabel); + ownerVertex.propNotLoaded(); + otherVertex.propNotLoaded(); + + HugeEdge edge = new HugeEdge(null, null, edgeLabel); + edge.name(sortValues); + edge.vertices(isOutEdge, ownerVertex, otherVertex); + edge.assignId(); + + if (isOutEdge) { + ownerVertex.addOutEdge(edge); + otherVertex.addInEdge(edge.switchOwner()); + } else { + ownerVertex.addInEdge(edge); + otherVertex.addOutEdge(edge.switchOwner()); + } + + return edge; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java index 4f5d8d04c4..d1c7b1bf06 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeIndex.java @@ -44,6 +44,7 @@ public class HugeIndex implements GraphType, Cloneable { private Object fieldValues; private IndexLabel indexLabel; private Set elementIds; + private static final int HUGE_TYPE_CODE_LENGTH = 1; public HugeIndex(HugeGraph graph, IndexLabel indexLabel) { E.checkNotNull(graph, "graph"); @@ -210,11 +211,12 @@ public static Id formatIndexId(HugeType type, Id indexLabelId, * index label in front(hugegraph-1317) */ String strIndexLabelId = IdGenerator.asStoredString(indexLabelId); - return 
SplicingIdGenerator.splicing(strIndexLabelId, value); + return SplicingIdGenerator.splicing(type.string(), strIndexLabelId, value); } else { assert type.isRangeIndex(); int length = type.isRange4Index() ? 4 : 8; - BytesBuffer buffer = BytesBuffer.allocate(4 + length); + BytesBuffer buffer = BytesBuffer.allocate(HUGE_TYPE_CODE_LENGTH + 4 + length); + buffer.write(type.code()); buffer.writeInt(SchemaElement.schemaId(indexLabelId)); if (fieldValues != null) { E.checkState(fieldValues instanceof Number, @@ -234,15 +236,16 @@ public static HugeIndex parseIndexId(HugeGraph graph, HugeType type, if (type.isStringIndex()) { Id idObject = IdGenerator.of(id, IdType.STRING); String[] parts = SplicingIdGenerator.parse(idObject); - E.checkState(parts.length == 2, "Invalid secondary index id"); - Id label = IdGenerator.ofStoredString(parts[0], IdType.LONG); + E.checkState(parts.length == 3, "Invalid secondary index id"); + Id label = IdGenerator.ofStoredString(parts[1], IdType.LONG); indexLabel = IndexLabel.label(graph, label); - values = parts[1]; + values = parts[2]; } else { assert type.isRange4Index() || type.isRange8Index(); final int labelLength = 4; E.checkState(id.length > labelLength, "Invalid range index id"); BytesBuffer buffer = BytesBuffer.wrap(id); + buffer.read(HUGE_TYPE_CODE_LENGTH); Id label = IdGenerator.of(buffer.readInt()); indexLabel = IndexLabel.label(graph, label); List fields = indexLabel.indexFields(); @@ -252,7 +255,7 @@ public static HugeIndex parseIndexId(HugeGraph graph, HugeType type, "Invalid range index field type"); Class clazz = dataType.isNumber() ? 
dataType.clazz() : DataType.LONG.clazz(); - values = bytes2number(buffer.read(id.length - labelLength), clazz); + values = bytes2number(buffer.read(id.length - labelLength - HUGE_TYPE_CODE_LENGTH), clazz); } HugeIndex index = new HugeIndex(graph, indexLabel); index.fieldValues(values); diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java index c270312b09..731bfa4446 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/structure/HugeVertex.java @@ -43,6 +43,8 @@ import org.apache.hugegraph.schema.EdgeLabel; import org.apache.hugegraph.schema.PropertyKey; import org.apache.hugegraph.schema.VertexLabel; +import org.apache.hugegraph.task.HugeServerInfo; +import org.apache.hugegraph.task.HugeTask; import org.apache.hugegraph.type.HugeType; import org.apache.hugegraph.type.define.Cardinality; import org.apache.hugegraph.type.define.CollectionType; @@ -90,6 +92,12 @@ public HugeVertex(final HugeGraph graph, Id id, VertexLabel label) { @Override public HugeType type() { + if (label != null && label.name().equals(HugeTask.P.TASK)) { + return HugeType.TASK; + } + if (label != null && label.name().equals(HugeServerInfo.P.SERVER)) { + return HugeType.SERVER; + } return HugeType.VERTEX; } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java index e8cccf88e3..b33dc0f45b 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/ServerInfoManager.java @@ -319,7 +319,7 @@ private HugeServerInfo selfServerInfo() { private HugeServerInfo 
serverInfo(Id server) { return this.call(() -> { - Iterator vertices = this.tx().queryVertices(server); + Iterator vertices = this.tx().queryServerInfos(server); Vertex vertex = QueryResults.one(vertices); if (vertex == null) { return null; @@ -347,7 +347,7 @@ private HugeServerInfo removeServerInfo(Id server) { } LOG.info("Remove server info: {}", server); return this.call(() -> { - Iterator vertices = this.tx().queryVertices(server); + Iterator vertices = this.tx().queryServerInfos(server); Vertex vertex = QueryResults.one(vertices); if (vertex == null) { return null; @@ -382,7 +382,12 @@ protected Iterator serverInfos(long limit, String page) { private Iterator serverInfos(Map conditions, long limit, String page) { return this.call(() -> { - ConditionQuery query = new ConditionQuery(HugeType.VERTEX); + ConditionQuery query; + if (this.graph.backendStoreFeatures().supportsTaskAndServerVertex()) { + query = new ConditionQuery(HugeType.SERVER); + } else { + query = new ConditionQuery(HugeType.VERTEX); + } if (page != null) { query.page(page); } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java index 9eda3f6b02..50d0a13c7d 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/task/StandardTaskScheduler.java @@ -523,7 +523,7 @@ public Iterator> tasks(TaskStatus status, public HugeTask findTask(Id id) { HugeTask result = this.call(() -> { - Iterator vertices = this.tx().queryVertices(id); + Iterator vertices = this.tx().queryTaskInfos(id); Vertex vertex = QueryResults.one(vertices); if (vertex == null) { return null; @@ -573,7 +573,7 @@ public HugeTask delete(Id id) { } return this.call(() -> { - Iterator vertices = this.tx().queryVertices(id); + Iterator vertices = 
this.tx().queryTaskInfos(id); HugeVertex vertex = (HugeVertex) QueryResults.one(vertices); if (vertex == null) { return null; @@ -666,7 +666,12 @@ private Iterator> queryTask(String key, Object value, private Iterator> queryTask(Map conditions, long limit, String page) { return this.call(() -> { - ConditionQuery query = new ConditionQuery(HugeType.VERTEX); + ConditionQuery query; + if (this.graph.backendStoreFeatures().supportsTaskAndServerVertex()) { + query = new ConditionQuery(HugeType.TASK); + } else { + query = new ConditionQuery(HugeType.VERTEX); + } if (page != null) { query.page(page); } @@ -691,7 +696,7 @@ private Iterator> queryTask(Map conditions, private Iterator> queryTask(List ids) { return this.call(() -> { Object[] idArray = ids.toArray(new Id[0]); - Iterator vertices = this.tx().queryVertices(idArray); + Iterator vertices = this.tx().queryTaskInfos(idArray); Iterator> tasks = new MapperIterator<>(vertices, HugeTask::fromVertex); // Convert iterator to list to avoid across thread tx accessed @@ -756,7 +761,7 @@ public HugeVertex constructVertex(HugeTask task) { public void deleteIndex(HugeVertex vertex) { // Delete the old record if exist - Iterator old = this.queryVertices(vertex.id()); + Iterator old = this.queryTaskInfos(vertex.id()); HugeVertex oldV = (HugeVertex) QueryResults.one(old); if (oldV == null) { return; diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeTableType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeTableType.java new file mode 100644 index 0000000000..a3edf746ff --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeTableType.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.type; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.hugegraph.type.define.SerialEnum; + +public enum HugeTableType implements SerialEnum { + + UNKNOWN(0, "UNKNOWN"), + + /* Schema types */ + VERTEX(1, "V"), // 顶点表 + OUT_EDGE(2, "OE"), // 出边表 + IN_EDGE(3, "IE"), // 入边表 + ALL_INDEX_TABLE(4, "INDEX"), // 索引表 + TASK_INFO_TABLE(5, "TASK"), // 任务信息表 + OLAP_TABLE(6, "OLAP"), // OLAP 表 + SERVER_INFO_TABLE(7, "SERVER"); // SERVER 信息表 + + private static final Map ALL_NAME = new HashMap<>(); + + static { + SerialEnum.register(HugeTableType.class); + for (HugeTableType type : values()) { + ALL_NAME.put(type.name, type); + } + } + + private byte type = 0; + private String name; + + HugeTableType(int type, String name) { + assert type < 256; + this.type = (byte) type; + this.name = name; + } + + @Override + public byte code() { + return this.type; + } + + public String string() { + return this.name; + } +} diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeType.java index e23642b4ee..87a10a65d3 100644 --- a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeType.java +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/HugeType.java @@ -65,7 +65,8 @@ public enum 
HugeType implements SerialEnum { SHARD_INDEX(175, "HI"), UNIQUE_INDEX(178, "UI"), - TASK(180, "T"), + TASK(180, "TASK"), + SERVER(181, "SERVER"), // System schema SYS_SCHEMA(250, "SS"), @@ -115,7 +116,7 @@ public boolean isGraph() { } public boolean isVertex() { - return this == HugeType.VERTEX; + return this == HugeType.VERTEX || this == HugeType.TASK || this == HugeType.SERVER; } public boolean isEdge() { @@ -192,4 +193,8 @@ public static HugeType fromString(String type) { public static HugeType fromCode(byte code) { return SerialEnum.fromCode(HugeType.class, code); } + + public boolean isLabelIndex() { + return this == VERTEX_LABEL_INDEX || this == EDGE_LABEL_INDEX; + } } diff --git a/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/EdgeLabelType.java b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/EdgeLabelType.java new file mode 100644 index 0000000000..912ed43d55 --- /dev/null +++ b/hugegraph-server/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/EdgeLabelType.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.type.define; + +public enum EdgeLabelType implements SerialEnum { + + + NORMAL(1, "NORMAL"), + + PARENT(2, "PARENT"), + + SUB(3, "SUB"), + + GENERAL(4, "GENERAL"), + ; + + static { + SerialEnum.register(EdgeLabelType.class); + } + + private final byte code; + private final String name; + + EdgeLabelType(int code, String name) { + assert code < 256; + this.code = (byte) code; + this.name = name; + } + + @Override + public byte code() { + return this.code; + } + + public String string() { + return this.name; + } + + public boolean normal() { + return this == NORMAL; + } + + public boolean parent() { + return this == PARENT; + } + + public boolean sub() { + return this == SUB; + } + + public boolean general() { + return this == GENERAL; + } + +} diff --git a/hugegraph-server/hugegraph-dist/pom.xml b/hugegraph-server/hugegraph-dist/pom.xml index e9f8e3c431..fe2287f590 100644 --- a/hugegraph-server/hugegraph-dist/pom.xml +++ b/hugegraph-server/hugegraph-dist/pom.xml @@ -98,6 +98,11 @@ hugegraph-postgresql ${revision} + + org.apache.hugegraph + hugegraph-hstore + ${revision} + org.apache.tinkerpop diff --git a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties index 5f77efe939..74e1408c70 100644 --- a/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties +++ b/hugegraph-server/hugegraph-dist/src/assembly/static/conf/graphs/hugegraph.properties @@ -19,7 +19,7 @@ edge.cache_type=l2 #vertex.default_label=vertex -backend=rocksdb +backend=hstore serializer=binary store=hugegraph diff --git a/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java b/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java index 34e5ea3cf6..9a3b632d4d 100644 --- 
a/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java +++ b/hugegraph-server/hugegraph-dist/src/main/java/org/apache/hugegraph/dist/RegisterUtil.java @@ -91,6 +91,9 @@ private static void registerBackend(String backend) { case "postgresql": registerPostgresql(); break; + case "hstore": + registerHstore(); + break; default: throw new HugeException("Unsupported backend type '%s'", backend); } @@ -215,4 +218,13 @@ public static void registerPlugins() { } } } + + public static void registerHstore() { + // Register config + OptionSpace.register("hstore", + "org.apache.hugegraph.backend.store.hstore.HstoreOptions"); + // Register backend + BackendProviderFactory.register("hstore", + "org.apache.hugegraph.backend.store.hstore.HstoreProvider"); + } } diff --git a/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties b/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties index aa49ed14b2..c2dd5df2a8 100644 --- a/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties +++ b/hugegraph-server/hugegraph-dist/src/main/resources/backend.properties @@ -15,4 +15,4 @@ # under the License. 
# -backends=[cassandra, scylladb, rocksdb, mysql, palo, hbase, postgresql] +backends=[cassandra, scylladb, rocksdb, mysql, palo, hbase, postgresql, hstore] diff --git a/hugegraph-server/hugegraph-example/src/main/java/org/apache/hugegraph/example/ExampleNew.java b/hugegraph-server/hugegraph-example/src/main/java/org/apache/hugegraph/example/ExampleNew.java new file mode 100644 index 0000000000..1c9b4413b3 --- /dev/null +++ b/hugegraph-server/hugegraph-example/src/main/java/org/apache/hugegraph/example/ExampleNew.java @@ -0,0 +1,51 @@ +package org.apache.hugegraph.example; + +import static org.apache.hugegraph.backend.page.PageState.EMPTY_BYTES; + +import org.apache.hugegraph.pd.client.PDConfig; +import org.apache.hugegraph.store.HgKvEntry; +import org.apache.hugegraph.store.HgKvIterator; +import org.apache.hugegraph.store.HgKvStore; +import org.apache.hugegraph.store.HgStoreClient; +import org.apache.hugegraph.store.HgStoreSession; + +public class ExampleNew { + public static void main(String[] args) throws Exception { + testScanTable("hugegraph", "g+v"); // why must the table name be prefixed with "g+"? TODO: confirm
+ } + + private static void testScanTable(String graph, String table) { + /* + * Valid table is: + * g+v + * g+oe + * g+ie + * g+olap + * g+task + * g+index + * g+server + */ + HgStoreClient storeClient = HgStoreClient.create( + PDConfig.of("127.0.0.1:8686").setEnableCache(false)); + String storeTemplate = "%s/g"; + String store = String.format(storeTemplate, graph); + HgStoreSession session = storeClient.openSession(store); + + try (HgKvIterator iterators = session.scanIterator(table, + 0, 100000000, + HgKvStore.SCAN_HASHCODE, + EMPTY_BYTES)) { + int count = 0; + while (iterators.hasNext()) { + count++; + HgKvEntry next = iterators.next(); + System.out.println(new String(next.key()) + + " <====> " + + new String(next.value())); + } + System.out.println(count); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java b/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java index 4ec7f7db7f..4fa738ce4c 100644 --- a/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java +++ b/hugegraph-server/hugegraph-hbase/src/main/java/org/apache/hugegraph/backend/store/hbase/HbaseStore.java @@ -96,7 +96,7 @@ protected void registerTableManager(HugeType type, HbaseTable table) { @Override protected final HbaseTable table(HugeType type) { assert type != null; - HbaseTable table = this.tables.get(type); + HbaseTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table type: %s", type); } diff --git a/hugegraph-server/hugegraph-hstore/pom.xml b/hugegraph-server/hugegraph-hstore/pom.xml new file mode 100644 index 0000000000..f777eb05ef --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/pom.xml @@ -0,0 +1,50 @@ + + + + + hugegraph-server + org.apache.hugegraph + ${revision} + ../pom.xml + + + 
4.0.0 + + hugegraph-hstore + + + + org.apache.hugegraph + hugegraph-core + ${revision} + + + org.apache.hugegraph + hg-store-client + ${revision} + + + org.apache.hugegraph + hg-pd-client + ${revision} + + + + diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreFeatures.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreFeatures.java new file mode 100644 index 0000000000..3af6f803bc --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreFeatures.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.store.hstore; + +import org.apache.hugegraph.backend.store.BackendFeatures; + +public class HstoreFeatures implements BackendFeatures { + + @Override + public boolean supportsScanToken() { + return false; + } + + @Override + public boolean supportsScanKeyPrefix() { + return true; + } + + @Override + public boolean supportsScanKeyRange() { + return true; + } + + @Override + public boolean supportsQuerySchemaByName() { + return false; + } + + @Override + public boolean supportsQueryByLabel() { + return false; + } + + @Override + public boolean supportsQueryWithInCondition() { + return false; + } + + @Override + public boolean supportsQueryWithRangeCondition() { + return true; + } + + @Override + public boolean supportsQuerySortByInputIds() { + return true; + } + + @Override + public boolean supportsQueryWithOrderBy() { + return true; + } + + @Override + public boolean supportsQueryWithContains() { + return false; + } + + @Override + public boolean supportsQueryWithContainsKey() { + return false; + } + + @Override + public boolean supportsQueryByPage() { + return true; + } + + @Override + public boolean supportsDeleteEdgeByLabel() { + return false; + } + + @Override + public boolean supportsUpdateVertexProperty() { + // Vertex properties are stored in a cell(column value) + return false; + } + + @Override + public boolean supportsMergeVertexProperty() { + return false; + } + + @Override + public boolean supportsUpdateEdgeProperty() { + // Edge properties are stored in a cell(column value) + return false; + } + + @Override + public boolean supportsTransaction() { + return false; + } + + @Override + public boolean supportsNumberType() { + return false; + } + + @Override + public boolean supportsAggregateProperty() { + return false; + } + + @Override + public boolean supportsTtl() { + return false; + } + + @Override + public boolean supportsOlapProperties() { + return true; + } + + @Override + public boolean 
supportsTaskAndServerVertex() { return true; } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreNodePartitionerImpl.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreNodePartitionerImpl.java new file mode 100644 index 0000000000..2a69fe1c03 --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreNodePartitionerImpl.java @@ -0,0 +1,279 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.store.hstore; + +import static org.apache.hugegraph.store.client.util.HgStoreClientConst.ALL_PARTITION_OWNER; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.pd.client.PDClient; +import org.apache.hugegraph.pd.common.KVPair; +import org.apache.hugegraph.pd.common.PDException; +import org.apache.hugegraph.pd.common.PartitionUtils; +import org.apache.hugegraph.pd.grpc.Metapb; +import org.apache.hugegraph.store.client.HgNodePartition; +import org.apache.hugegraph.store.client.HgNodePartitionerBuilder; +import org.apache.hugegraph.store.client.HgStoreNode; +import org.apache.hugegraph.store.client.HgStoreNodeManager; +import org.apache.hugegraph.store.client.HgStoreNodeNotifier; +import org.apache.hugegraph.store.client.HgStoreNodePartitioner; +import org.apache.hugegraph.store.client.HgStoreNodeProvider; +import org.apache.hugegraph.store.client.HgStoreNotice; +import org.apache.hugegraph.store.client.type.HgNodeStatus; +import org.apache.hugegraph.store.client.util.HgStoreClientConst; +import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +public class HstoreNodePartitionerImpl implements HgStoreNodePartitioner, + HgStoreNodeProvider, + HgStoreNodeNotifier { + + private static final Logger LOG = Log.logger(HstoreNodePartitionerImpl.class); + private PDClient pdClient; + private HgStoreNodeManager nodeManager; + + protected HstoreNodePartitionerImpl() { + + } + + public HstoreNodePartitionerImpl(String pdPeers) { + pdClient = HstoreSessionsImpl.getDefaultPdClient(); + } + + public HstoreNodePartitionerImpl(HgStoreNodeManager nodeManager, + String pdPeers) { + this(pdPeers); + this.nodeManager = nodeManager; + } + + public void setPDClient(PDClient pdClient) { + this.pdClient = pdClient; + } + + /** + * 
Query partition information; the result is returned via HgNodePartitionerBuilder + */ + @Override + public int partition(HgNodePartitionerBuilder builder, String graphName, + byte[] startKey, byte[] endKey) { + try { + HashSet partitions = new HashSet<>(); + if (HgStoreClientConst.ALL_PARTITION_OWNER == startKey) { + List stores = pdClient.getActiveStores(graphName); + partitions = new HashSet<>(stores.size()); + for (Metapb.Store store : stores) { + partitions.add(HgNodePartition.of(store.getId(), -1)); + } + + } else if (endKey == HgStoreClientConst.EMPTY_BYTES + || startKey == endKey || Arrays.equals(startKey, endKey)) { + KVPair partShard = + pdClient.getPartition(graphName, startKey); + Metapb.Shard leader = partShard.getValue(); + partitions = new HashSet<>(1); + partitions.add(HgNodePartition.of(leader.getStoreId(), + pdClient.keyToCode(graphName, startKey))); + } else { + LOG.warn( + "StartOwnerkey is not equal to endOwnerkey, which is meaningless!!, It is" + + " a error!!"); + List stores = pdClient.getActiveStores(graphName); + for (Metapb.Store store : stores) { + partitions.add(HgNodePartition.of(store.getId(), -1)); + } + } + builder.setPartitions(partitions); + } catch (PDException e) { + LOG.error("An error occurred while getting partition information :{}", e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + return 0; + } + + @Override + public int partition(HgNodePartitionerBuilder builder, String graphName, + int startKey, int endKey) { + try { + HashSet partitions = new HashSet<>(); + Metapb.Partition partition = null; + while ((partition == null || partition.getEndKey() < endKey) + && startKey < PartitionUtils.MAX_VALUE) { + KVPair partShard = + pdClient.getPartitionByCode(graphName, startKey); + if (partShard != null) { + partition = partShard.getKey(); + Metapb.Shard leader = partShard.getValue(); + partitions.add(HgNodePartition.of(leader.getStoreId(), startKey, + (int) partition.getStartKey(), + (int) partition.getEndKey())); + startKey = (int) partition.getEndKey(); + } else
{ + break; + } + } + builder.setPartitions(partitions); + } catch (PDException e) { + LOG.error("An error occurred while getting partition information :{}", e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + return 0; + + } + + /** + * Query hgstore information + * + * @return hgstore + */ + @Override + public HgStoreNode apply(String graphName, Long nodeId) { + try { + Metapb.Store store = pdClient.getStore(nodeId); + return nodeManager.getNodeBuilder().setNodeId(store.getId()) + .setAddress(store.getAddress()).build(); + } catch (PDException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Notify to update the cache + */ + @Override + public int notice(String graphName, HgStoreNotice storeNotice) { + LOG.warn(storeNotice.toString()); + if (storeNotice.getPartitionLeaders() != null) { + storeNotice.getPartitionLeaders().forEach((partId, leader) -> { + pdClient.updatePartitionLeader(graphName, partId, leader); + LOG.warn("updatePartitionLeader:{}-{}-{}", + graphName, partId, leader); + }); + } + if (storeNotice.getPartitionIds() != null) { + storeNotice.getPartitionIds().forEach(partId -> { + pdClient.invalidPartitionCache(graphName, partId); + }); + } + if (!storeNotice.getNodeStatus().equals( + HgNodeStatus.PARTITION_COMMON_FAULT) + && !storeNotice.getNodeStatus().equals( + HgNodeStatus.NOT_PARTITION_LEADER)) { + pdClient.invalidPartitionCache(); + LOG.warn("invalidPartitionCache:{} ", storeNotice.getNodeStatus()); + } + return 0; + } + + public Metapb.Graph delGraph(String graphName) { + try { + return pdClient.delGraph(graphName); + } catch (PDException e) { + LOG.error("delGraph {} exception, {}", graphName, e.getMessage()); + } + return null; + } + + public void setNodeManager(HgStoreNodeManager nodeManager) { + this.nodeManager = nodeManager; + } +} + +class FakeHstoreNodePartitionerImpl extends HstoreNodePartitionerImpl { + private static final Logger LOG = Log.logger(HstoreNodePartitionerImpl.class); + private static final int partitionCount =
3; + private static final Map leaderMap = new ConcurrentHashMap<>(); + private static final Map storeMap = new ConcurrentHashMap<>(); + HgStoreNodeManager nodeManager; + private final String hstorePeers; + + public FakeHstoreNodePartitionerImpl(String pdPeers) { + this.hstorePeers = pdPeers; + // store列表 + for (String address : hstorePeers.split(",")) { + storeMap.put((long) address.hashCode(), address); + } + // 分区列表 + for (int i = 0; i < partitionCount; i++) { + leaderMap.put(i, storeMap.keySet().iterator().next()); + } + } + + public FakeHstoreNodePartitionerImpl(HgStoreNodeManager nodeManager, + String peers) { + this(peers); + this.nodeManager = nodeManager; + } + + @Override + public int partition(HgNodePartitionerBuilder builder, String graphName, + byte[] startKey, byte[] endKey) { + int startCode = PartitionUtils.calcHashcode(startKey); + HashSet partitions = new HashSet<>(storeMap.size()); + if (ALL_PARTITION_OWNER == startKey) { + storeMap.forEach((k, v) -> { + partitions.add(HgNodePartition.of(k, -1)); + }); + } else if (endKey == HgStoreClientConst.EMPTY_BYTES || startKey == endKey || + Arrays.equals(startKey, endKey)) { + partitions.add( + HgNodePartition.of(leaderMap.get(startCode % partitionCount), startCode)); + } else { + LOG.error("OwnerKey转成HashCode后已经无序了, 按照OwnerKey范围查询没意义"); + storeMap.forEach((k, v) -> { + partitions.add(HgNodePartition.of(k, -1)); + }); + } + builder.setPartitions(partitions); + return 0; + } + + @Override + public HgStoreNode apply(String graphName, Long nodeId) { + return nodeManager.getNodeBuilder().setNodeId(nodeId) + .setAddress(storeMap.get(nodeId)).build(); + } + + @Override + public int notice(String graphName, HgStoreNotice storeNotice) { + if (storeNotice.getPartitionLeaders() != null + && storeNotice.getPartitionLeaders().size() > 0) { + leaderMap.putAll(storeNotice.getPartitionLeaders()); + } + return 0; + } + + public static class NodePartitionerFactory { + public static HstoreNodePartitionerImpl 
getNodePartitioner( + HugeConfig config, HgStoreNodeManager nodeManager) { + if (config.get(HstoreOptions.PD_FAKE)) { + return new FakeHstoreNodePartitionerImpl(nodeManager, + config.get(HstoreOptions.HSTORE_PEERS)); + } else { + return new HstoreNodePartitionerImpl(nodeManager, + config.get(HstoreOptions.PD_PEERS) + ); + } + + } + } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreOptions.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreOptions.java new file mode 100644 index 0000000000..bafde45f82 --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreOptions.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.store.hstore; + +import static org.apache.hugegraph.config.OptionChecker.disallowEmpty; + +import org.apache.hugegraph.config.ConfigOption; +import org.apache.hugegraph.config.OptionHolder; + +public class HstoreOptions extends OptionHolder { + + public static final ConfigOption PD_PEERS = new ConfigOption<>( + "pd.peers", + "The addresses of pd nodes, separated with commas.", + disallowEmpty(), + "localhost:8686" + ); + public static final ConfigOption PD_FAKE = new ConfigOption<>( + "pd.fake", + "Enable the fake PD service.", + disallowEmpty(), + false + ); + public static final ConfigOption HSTORE_PEERS = new ConfigOption<>( + "hstore.peers", + "The addresses of store nodes, separated with commas.", + disallowEmpty(), + "localhost:9080" + ); + public static final ConfigOption PARTITION_COUNT = new ConfigOption<>( + "hstore.partition_count", + "Number of partitions, which PD controls partitions based on.", + disallowEmpty(), + 0 + ); + public static final ConfigOption SHARD_COUNT = new ConfigOption<>( + "hstore.shard_count", + "Number of copies, which PD controls partition copies based on.", + disallowEmpty(), + 0 + ); + private static volatile HstoreOptions instance; + + private HstoreOptions() { + super(); + } + + public static synchronized HstoreOptions instance() { + if (instance == null) { + instance = new HstoreOptions(); + instance.registerOptions(); + } + return instance; + } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreProvider.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreProvider.java new file mode 100644 index 0000000000..f9d48d36c9 --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreProvider.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.store.hstore; + +import org.apache.hugegraph.backend.store.AbstractBackendStoreProvider; +import org.apache.hugegraph.backend.store.BackendStore; +import org.apache.hugegraph.config.HugeConfig; + +public class HstoreProvider extends AbstractBackendStoreProvider { + + protected String namespace() { + return this.graph(); + } + + @Override + public String type() { + return "hstore"; + } + + @Override + public String driverVersion() { + return "1.13"; + } + + @Override + protected BackendStore newSchemaStore(HugeConfig config, String store) { + return new HstoreStore.HstoreSchemaStore(this, this.namespace(), store); + } + + @Override + protected BackendStore newGraphStore(HugeConfig config, String store) { + return new HstoreStore.HstoreGraphStore(this, this.namespace(), store); + } + + @Override + protected BackendStore newSystemStore(HugeConfig config, String store) { + return null; + } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreSessions.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreSessions.java new file mode 100755 index 0000000000..883ae7bec8 --- /dev/null +++ 
b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreSessions.java @@ -0,0 +1,206 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.store.hstore; + +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +import org.apache.commons.lang3.tuple.Pair; +import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumnIterator; +import org.apache.hugegraph.backend.store.BackendSession.AbstractBackendSession; +import org.apache.hugegraph.backend.store.BackendSessionPool; +import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.store.HgOwnerKey; +import org.apache.hugegraph.type.define.GraphMode; + +public abstract class HstoreSessions extends BackendSessionPool { + + public HstoreSessions(HugeConfig config, String database, String store) { + super(config, database + "/" + store); + } + + public abstract Set openedTables(); + + public abstract void createTable(String... tables); + + public abstract void dropTable(String... 
tables); + + public abstract boolean existsTable(String table); + + public abstract void truncateTable(String table); + + public abstract void clear(); + + @Override + public abstract Session session(); + + public interface Countable { + + public long count(); + } + + /** + * Session for Hstore + */ + public static abstract class Session extends AbstractBackendSession { + + public static final int SCAN_ANY = 0x80; + public static final int SCAN_PREFIX_BEGIN = 0x01; + public static final int SCAN_PREFIX_END = 0x02; + public static final int SCAN_GT_BEGIN = 0x04; + public static final int SCAN_GTE_BEGIN = 0x0c; + public static final int SCAN_LT_END = 0x10; + public static final int SCAN_LTE_END = 0x30; + public static final int SCAN_KEY_ONLY = 0x40; + public static final int SCAN_HASHCODE = 0x100; + + private HugeConfig conf; + private String graphName; + + public static boolean matchScanType(int expected, int actual) { + return (expected & actual) == expected; + } + + public abstract void createTable(String tableName); + + public abstract void dropTable(String tableName); + + public abstract boolean existsTable(String tableName); + + public abstract void truncateTable(String tableName); + + public abstract void deleteGraph(); + + public abstract Pair keyRange(String table); + + public abstract void put(String table, byte[] ownerKey, + byte[] key, byte[] value); + + public abstract void increase(String table, byte[] ownerKey, + byte[] key, byte[] value); + + public abstract void delete(String table, byte[] ownerKey, byte[] key); + + public abstract void deletePrefix(String table, byte[] ownerKey, + byte[] key); + + public abstract void deleteRange(String table, byte[] ownerKeyFrom, + byte[] ownerKeyTo, byte[] keyFrom, + byte[] keyTo); + + public abstract byte[] get(String table, byte[] key); + + public abstract byte[] get(String table, byte[] ownerKey, byte[] key); + + public abstract BackendColumnIterator scan(String table); + + public abstract BackendColumnIterator 
scan(String table, + byte[] ownerKey, + byte[] prefix); + + public BackendColumnIterator scan(String table, byte[] ownerKeyFrom, + byte[] ownerKeyTo, byte[] keyFrom, + byte[] keyTo) { + return this.scan(table, ownerKeyFrom, ownerKeyTo, keyFrom, keyTo, + SCAN_LT_END); + } + + public abstract List scan(String table, + List keys, + int scanType, + long limit, + byte[] query); + + public abstract BackendEntry.BackendIterator scan(String table, + Iterator keys, + int scanType, + Query queryParam, + byte[] query); + + public abstract BackendColumnIterator scan(String table, + byte[] ownerKeyFrom, + byte[] ownerKeyTo, + byte[] keyFrom, + byte[] keyTo, + int scanType); + + public abstract BackendColumnIterator scan(String table, + byte[] ownerKeyFrom, + byte[] ownerKeyTo, + byte[] keyFrom, + byte[] keyTo, + int scanType, + byte[] query); + + public abstract BackendColumnIterator scan(String table, + byte[] ownerKeyFrom, + byte[] ownerKeyTo, + byte[] keyFrom, + byte[] keyTo, + int scanType, + byte[] query, + byte[] position); + + public abstract BackendColumnIterator scan(String table, + int codeFrom, + int codeTo, + int scanType, + byte[] query); + + public abstract BackendColumnIterator scan(String table, + int codeFrom, + int codeTo, + int scanType, + byte[] query, + byte[] position); + + public abstract BackendColumnIterator getWithBatch(String table, + List keys); + + public abstract void merge(String table, byte[] ownerKey, + byte[] key, byte[] value); + + public abstract void setMode(GraphMode mode); + + public abstract void truncate() throws Exception; + + public abstract BackendColumnIterator scan(String table, + byte[] conditionQueryToByte); + + public HugeConfig getConf() { + return conf; + } + + public void setConf(HugeConfig conf) { + this.conf = conf; + } + + public String getGraphName() { + return graphName; + } + + public void setGraphName(String graphName) { + this.graphName = graphName; + } + + public abstract void beginTx(); + } +} diff --git 
a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreSessionsImpl.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreSessionsImpl.java new file mode 100755 index 0000000000..dad89db79c --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreSessionsImpl.java @@ -0,0 +1,782 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.store.hstore; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.commons.lang3.ArrayUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumnIterator; +import org.apache.hugegraph.backend.store.BackendEntryIterator; +import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.pd.client.PDClient; +import org.apache.hugegraph.pd.client.PDConfig; +import org.apache.hugegraph.pd.common.PDException; +import org.apache.hugegraph.pd.grpc.Metapb; +import org.apache.hugegraph.store.HgKvEntry; +import org.apache.hugegraph.store.HgKvIterator; +import org.apache.hugegraph.store.HgOwnerKey; +import org.apache.hugegraph.store.HgScanQuery; +import org.apache.hugegraph.store.HgStoreClient; +import org.apache.hugegraph.store.HgStoreSession; +import org.apache.hugegraph.store.client.util.HgStoreClientConst; +import org.apache.hugegraph.testutil.Assert; +import org.apache.hugegraph.type.define.GraphMode; +import org.apache.hugegraph.util.Bytes; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.StringEncoding; + +public class HstoreSessionsImpl extends HstoreSessions { + + private static final Set infoInitializedGraph = + Collections.synchronizedSet(new HashSet<>()); + private static int tableCode = 0; + private static volatile Boolean initializedNode = Boolean.FALSE; + private static volatile PDClient defaultPdClient; + private static volatile 
HgStoreClient hgStoreClient; + private final HugeConfig config; + private final HstoreSession session; + private final Map tables; + private final AtomicInteger refCount; + private final String graphName; + + public HstoreSessionsImpl(HugeConfig config, String database, + String store) { + super(config, database, store); + this.config = config; + this.graphName = database + "/" + store; + this.initStoreNode(config); + this.session = new HstoreSession(this.config, graphName); + this.tables = new ConcurrentHashMap<>(); + this.refCount = new AtomicInteger(1); + } + + public static HgStoreClient getHgStoreClient() { + return hgStoreClient; + } + + public static PDClient getDefaultPdClient() { + return defaultPdClient; + } + + public static byte[] encode(String string) { + return StringEncoding.encode(string); + } + + public static String decode(byte[] bytes) { + return StringEncoding.decode(bytes); + } + + private void initStoreNode(HugeConfig config) { + if (!initializedNode) { + synchronized (this) { + if (!initializedNode) { + PDConfig pdConfig = + PDConfig.of(config.get(HstoreOptions.PD_PEERS)) + .setEnableCache(true); + defaultPdClient = PDClient.create(pdConfig); + hgStoreClient = + HgStoreClient.create(defaultPdClient); + initializedNode = Boolean.TRUE; + } + } + } + } + + @Override + public void open() throws Exception { + if (!infoInitializedGraph.contains(this.graphName)) { + synchronized (infoInitializedGraph) { + if (!infoInitializedGraph.contains(this.graphName)) { + Integer partitionCount = + this.config.get(HstoreOptions.PARTITION_COUNT); + Assert.assertTrue("The value of hstore.partition_count" + + " cannot be less than 0.", + partitionCount > -1); + defaultPdClient.setGraph(Metapb.Graph.newBuilder() + .setGraphName( + this.graphName) + .setPartitionCount( + partitionCount) + .build()); + infoInitializedGraph.add(this.graphName); + } + } + } + this.session.open(); + } + + @Override + protected boolean opened() { + return this.session != null; + } + + 
@Override + public Set openedTables() { + return this.tables.keySet(); + } + + @Override + public synchronized void createTable(String... tables) { + for (String table : tables) { + this.session.createTable(table); + this.tables.put(table, tableCode++); + } + } + + @Override + public synchronized void dropTable(String... tables) { + for (String table : tables) { + this.session.dropTable(table); + this.tables.remove(table); + } + } + + @Override + public boolean existsTable(String table) { + return this.session.existsTable(table); + } + + @Override + public void truncateTable(String table) { + this.session.truncateTable(table); + } + + @Override + public void clear() { + this.session.deleteGraph(); + try { + hgStoreClient.getPdClient().delGraph(this.graphName); + } catch (PDException e) { + + } + } + + @Override + public final Session session() { + return (Session) super.getOrNewSession(); + } + + @Override + protected final Session newSession() { + return new HstoreSession(this.config(), this.graphName); + } + + @Override + protected synchronized void doClose() { + this.checkValid(); + if (this.refCount != null) { + if (this.refCount.decrementAndGet() > 0) { + return; + } + if (this.refCount.get() != 0) { + return; + } + } + assert this.refCount.get() == 0; + this.tables.clear(); + this.session.close(); + } + + private void checkValid() { + } + + private static class ColumnIterator implements + BackendColumnIterator, + Countable { + + private final T iter; + private final byte[] keyBegin; + private final byte[] keyEnd; + private final int scanType; + private final String table; + private final byte[] value; + private boolean gotNext; + private byte[] position; + + public ColumnIterator(String table, T results) { + this(table, results, null, null, 0); + } + + public ColumnIterator(String table, T results, byte[] keyBegin, + byte[] keyEnd, int scanType) { + E.checkNotNull(results, "results"); + this.table = table; + this.iter = results; + this.keyBegin = keyBegin; + 
this.keyEnd = keyEnd; + this.scanType = scanType; + this.value = null; + if (this.iter.hasNext()) { + this.iter.next(); + this.gotNext = true; + this.position = iter.position(); + } else { + this.gotNext = false; + this.position = null; + } + if (!ArrayUtils.isEmpty(this.keyBegin) || + !ArrayUtils.isEmpty(this.keyEnd)) { + this.checkArguments(); + } + + } + + public T iter() { + return iter; + } + + private void checkArguments() { + E.checkArgument(!(this.match(Session.SCAN_PREFIX_BEGIN) && + this.match(Session.SCAN_PREFIX_END)), + "Can't set SCAN_PREFIX_WITH_BEGIN and " + + "SCAN_PREFIX_WITH_END at the same time"); + + E.checkArgument(!(this.match(Session.SCAN_PREFIX_BEGIN) && + this.match(Session.SCAN_GT_BEGIN)), + "Can't set SCAN_PREFIX_WITH_BEGIN and " + + "SCAN_GT_BEGIN/SCAN_GTE_BEGIN at the same time"); + + E.checkArgument(!(this.match(Session.SCAN_PREFIX_END) && + this.match(Session.SCAN_LT_END)), + "Can't set SCAN_PREFIX_WITH_END and " + + "SCAN_LT_END/SCAN_LTE_END at the same time"); + + if (this.match(Session.SCAN_PREFIX_BEGIN) && !matchHash()) { + E.checkArgument(this.keyBegin != null, + "Parameter `keyBegin` can't be null " + + "if set SCAN_PREFIX_WITH_BEGIN"); + E.checkArgument(this.keyEnd == null, + "Parameter `keyEnd` must be null " + + "if set SCAN_PREFIX_WITH_BEGIN"); + } + + if (this.match(Session.SCAN_PREFIX_END) && !matchHash()) { + E.checkArgument(this.keyEnd != null, + "Parameter `keyEnd` can't be null " + + "if set SCAN_PREFIX_WITH_END"); + } + + if (this.match(Session.SCAN_GT_BEGIN) && !matchHash()) { + E.checkArgument(this.keyBegin != null, + "Parameter `keyBegin` can't be null " + + "if set SCAN_GT_BEGIN or SCAN_GTE_BEGIN"); + } + + if (this.match(Session.SCAN_LT_END) && !matchHash()) { + E.checkArgument(this.keyEnd != null, + "Parameter `keyEnd` can't be null " + + "if set SCAN_LT_END or SCAN_LTE_END"); + } + } + + private boolean matchHash() { + return this.scanType == Session.SCAN_HASHCODE; + } + + private boolean match(int expected) { 
+ return Session.matchScanType(expected, this.scanType); + } + + + @Override + public boolean hasNext() { + if (gotNext) { + this.position = this.iter.position(); + } else { + this.position = null; + } + return gotNext; + } + + private boolean filter(byte[] key) { + if (this.match(Session.SCAN_PREFIX_BEGIN)) { + /* + * Prefix with `keyBegin`? + * TODO: use custom prefix_extractor instead + * or use ReadOptions.prefix_same_as_start + */ + return Bytes.prefixWith(key, this.keyBegin); + } else if (this.match(Session.SCAN_PREFIX_END)) { + /* + * Prefix with `keyEnd`? + * like the following query for range index: + * key > 'age:20' and prefix with 'age' + */ + assert this.keyEnd != null; + return Bytes.prefixWith(key, this.keyEnd); + } else if (this.match(Session.SCAN_LT_END)) { + /* + * Less (equal) than `keyEnd`? + * NOTE: don't use BytewiseComparator due to signed byte + */ + if ((this.scanType | Session.SCAN_HASHCODE) != 0) { + return true; + } + assert this.keyEnd != null; + if (this.match(Session.SCAN_LTE_END)) { + // Just compare the prefix, can be there are excess tail + key = Arrays.copyOfRange(key, 0, this.keyEnd.length); + return Bytes.compare(key, this.keyEnd) <= 0; + } else { + return Bytes.compare(key, this.keyEnd) < 0; + } + } else { + assert this.match(Session.SCAN_ANY) || this.match(Session.SCAN_GT_BEGIN) || + this.match( + Session.SCAN_GTE_BEGIN) : "Unknown scan type"; + return true; + } + } + + @Override + public BackendColumn next() { + BackendEntryIterator.checkInterrupted(); + if (!this.hasNext()) { + throw new NoSuchElementException(); + } + BackendColumn col = + BackendColumn.of(this.iter.key(), + this.iter.value()); + if (this.iter.hasNext()) { + gotNext = true; + this.iter.next(); + } else { + gotNext = false; + } + return col; + } + + @Override + public long count() { + long count = 0L; + while (this.hasNext()) { + this.next(); + count++; + BackendEntryIterator.checkInterrupted(); + } + return count; + } + + @Override + public byte[] 
position() { + return this.position; + } + + @Override + public void close() { + if (this.iter != null) { + this.iter.close(); + } + } + } + + /** + * HstoreSession implement for hstore + */ + private final class HstoreSession extends Session { + + private static final boolean TRANSACTIONAL = true; + private final HgStoreSession graph; + int changedSize = 0; + + public HstoreSession(HugeConfig conf, String graphName) { + setGraphName(graphName); + setConf(conf); + this.graph = hgStoreClient.openSession(graphName); + } + + @Override + public void open() { + this.opened = true; + } + + @Override + public void close() { + this.opened = false; + } + + @Override + public boolean closed() { + return !this.opened; + } + + @Override + public void reset() { + if (this.changedSize != 0) { + this.rollback(); + this.changedSize = 0; + } + } + + /** + * Any change in the session + */ + @Override + public boolean hasChanges() { + return this.changedSize > 0; + } + + /** + * Commit all updates(put/delete) to DB + */ + @Override + public Integer commit() { + int commitSize = this.changedSize; + if (TRANSACTIONAL) { + this.graph.commit(); + } + this.changedSize = 0; + return commitSize; + } + + /** + * Rollback all updates(put/delete) not committed + */ + @Override + public void rollback() { + if (TRANSACTIONAL) { + this.graph.rollback(); + } + this.changedSize = 0; + } + + @Override + public void createTable(String tableName) { + this.graph.createTable(tableName); + } + + @Override + public void dropTable(String tableName) { + this.graph.dropTable(tableName); + } + + @Override + public boolean existsTable(String tableName) { + return this.graph.existsTable(tableName); + } + + @Override + public void truncateTable(String tableName) { + this.graph.deleteTable(tableName); + } + + @Override + public void deleteGraph() { + this.graph.deleteGraph(this.getGraphName()); + } + + @Override + public Pair keyRange(String table) { + return null; + } + + private void prepare() { + if 
(!this.hasChanges() && TRANSACTIONAL) { + this.graph.beginTx(); + } + this.changedSize++; + } + + /** + * Add a KV record to a table + */ + @Override + public void put(String table, byte[] ownerKey, byte[] key, + byte[] value) { + prepare(); + this.graph.put(table, HgOwnerKey.of(ownerKey, key), value); + } + + @Override + public synchronized void increase(String table, byte[] ownerKey, + byte[] key, byte[] value) { + prepare(); + this.graph.merge(table, HgOwnerKey.of(ownerKey, key), value); + } + + @Override + public void delete(String table, byte[] ownerKey, byte[] key) { + prepare(); + this.graph.delete(table, HgOwnerKey.of(ownerKey, key)); + } + + @Override + public void deletePrefix(String table, byte[] ownerKey, byte[] key) { + prepare(); + this.graph.deletePrefix(table, HgOwnerKey.of(ownerKey, key)); + } + + /** + * Delete a range of keys from a table + */ + @Override + public void deleteRange(String table, byte[] ownerKeyFrom, + byte[] ownerKeyTo, byte[] keyFrom, + byte[] keyTo) { + prepare(); + this.graph.deleteRange(table, HgOwnerKey.of(ownerKeyFrom, keyFrom), + HgOwnerKey.of(ownerKeyTo, keyTo)); + } + + @Override + public byte[] get(String table, byte[] key) { + return this.graph.get(table, HgOwnerKey.of( + HgStoreClientConst.ALL_PARTITION_OWNER, key)); + } + + @Override + public byte[] get(String table, byte[] ownerKey, byte[] key) { + byte[] values = this.graph.get(table, HgOwnerKey.of(ownerKey, key)); + return values != null ? 
values : new byte[0]; + } + + @Override + public void beginTx() { + this.graph.beginTx(); + } + + @Override + public BackendColumnIterator scan(String table) { + assert !this.hasChanges(); + return new ColumnIterator<>(table, this.graph.scanIterator(table)); + } + + @Override + public BackendColumnIterator scan(String table, + byte[] conditionQueryToByte) { + assert !this.hasChanges(); + HgKvIterator results = + this.graph.scanIterator(table, conditionQueryToByte); + return new ColumnIterator<>(table, results); + } + + @Override + public BackendColumnIterator scan(String table, byte[] ownerKey, + byte[] prefix) { + assert !this.hasChanges(); + HgKvIterator result = this.graph.scanIterator(table, + HgOwnerKey.of( + ownerKey, + prefix)); + return new ColumnIterator<>(table, result); + } + + @Override + public List scan(String table, + List keys, + int scanType, long limit, + byte[] query) { + HgScanQuery scanQuery = HgScanQuery.prefixOf(table, keys).builder() + .setScanType(scanType) + .setQuery(query) + .setPerKeyLimit(limit).build(); + List> scanIterators = + this.graph.scanBatch(scanQuery); + LinkedList columnIterators = + new LinkedList<>(); + scanIterators.forEach(item -> { + columnIterators.add( + new ColumnIterator<>(table, item)); + }); + return columnIterators; + } + + @Override + public BackendEntry.BackendIterator scan( + String table, + Iterator keys, + int scanType, Query queryParam, byte[] query) { + //ScanOrderType orderType; + //switch (queryParam.orderType()) { + // case ORDER_NONE: + // orderType = ScanOrderType.ORDER_NONE; + // break; + // case ORDER_WITHIN_VERTEX: + // orderType = ScanOrderType.ORDER_WITHIN_VERTEX; + // break; + // case ORDER_STRICT: + // orderType = ScanOrderType.ORDER_STRICT; + // break; + // default: + // throw new RuntimeException("not implement"); + //} + //HgScanQuery scanQuery = HgScanQuery.prefixIteratorOf(table, keys) + // .builder() + // .setScanType(scanType) + // .setQuery(query) + // .setPerKeyMax(queryParam.limit()) 
+ // .setOrderType(orderType) + // .setOnlyKey( + // !queryParam.withProperties()) + // .setSkipDegree( + // queryParam.skipDegree()) + // .build(); + //KvCloseableIterator> scanIterators = + // this.graph.scanBatch2(scanQuery); + //return new BackendEntry.BackendIterator<>() { + // @Override + // public void close() { + // scanIterators.close(); + // } + // + // @Override + // public byte[] position() { + // throw new NotImplementedException(); + // } + // + // @Override + // public boolean hasNext() { + // return scanIterators.hasNext(); + // } + // + // @Override + // public BackendColumnIterator next() { + // return new ColumnIterator(table, + // scanIterators.next()); + // } + //}; + return null; + } + + @Override + public BackendColumnIterator scan(String table, byte[] ownerKeyFrom, + byte[] ownerKeyTo, + byte[] keyFrom, byte[] keyTo, + int scanType) { + assert !this.hasChanges(); + HgKvIterator result = this.graph.scanIterator(table, HgOwnerKey.of( + ownerKeyFrom, keyFrom), + HgOwnerKey.of( + ownerKeyTo, + keyTo), 0, + scanType, + null); + return new ColumnIterator<>(table, result, keyFrom, + keyTo, scanType); + } + + @Override + public BackendColumnIterator scan(String table, byte[] ownerKeyFrom, + byte[] ownerKeyTo, + byte[] keyFrom, byte[] keyTo, + int scanType, byte[] query) { + assert !this.hasChanges(); + HgKvIterator result = this.graph.scanIterator(table, + HgOwnerKey.of( + ownerKeyFrom, + keyFrom), + HgOwnerKey.of( + ownerKeyTo, + keyTo), + 0, + scanType, + query); + return new ColumnIterator<>(table, result, keyFrom, keyTo, + scanType); + } + + @Override + public BackendColumnIterator scan(String table, byte[] ownerKeyFrom, + byte[] ownerKeyTo, + byte[] keyFrom, byte[] keyTo, + int scanType, byte[] query, + byte[] position) { + assert !this.hasChanges(); + HgKvIterator result = this.graph.scanIterator(table, + HgOwnerKey.of( + ownerKeyFrom, + keyFrom), + HgOwnerKey.of( + ownerKeyTo, + keyTo), + 0, + scanType, + query); + result.seek(position); + 
return new ColumnIterator<>(table, result, keyFrom, keyTo, + scanType); + } + + @Override + public BackendColumnIterator scan(String table, int codeFrom, + int codeTo, int scanType, + byte[] query) { + assert !this.hasChanges(); + HgKvIterator iterator = + this.graph.scanIterator(table, codeFrom, codeTo, 256, + new byte[0]); + return new ColumnIterator<>(table, iterator, new byte[0], + new byte[0], scanType); + } + + @Override + public BackendColumnIterator scan(String table, int codeFrom, + int codeTo, int scanType, + byte[] query, byte[] position) { + assert !this.hasChanges(); + HgKvIterator iterator = + this.graph.scanIterator(table, codeFrom, codeTo, 256, + new byte[0]); + iterator.seek(position); + return new ColumnIterator<>(table, iterator, new byte[0], + new byte[0], scanType); + } + + @Override + public BackendColumnIterator getWithBatch(String table, + List keys) { + assert !this.hasChanges(); + HgKvIterator kvIterator = + this.graph.batchPrefix(table, keys); + return new ColumnIterator<>(table, kvIterator); + } + + @Override + public void merge(String table, byte[] ownerKey, byte[] key, + byte[] value) { + prepare(); + this.graph.merge(table, HgOwnerKey.of(ownerKey, key), value); + } + + @Override + public void setMode(GraphMode mode) { + // no need to set pd mode + } + + @Override + public void truncate() throws Exception { + this.graph.truncate(); + HstoreSessionsImpl.getDefaultPdClient() + .resetIdByKey(this.getGraphName()); + } + } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreStore.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreStore.java new file mode 100644 index 0000000000..588aa5f530 --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreStore.java @@ -0,0 +1,811 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.store.hstore; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.id.IdGenerator; +import org.apache.hugegraph.backend.query.IdPrefixQuery; +import org.apache.hugegraph.backend.query.IdQuery; +import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.serializer.BinaryBackendEntry; +import org.apache.hugegraph.backend.serializer.BytesBuffer; +import org.apache.hugegraph.backend.store.AbstractBackendStore; +import org.apache.hugegraph.backend.store.BackendAction; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendFeatures; +import org.apache.hugegraph.backend.store.BackendMutation; +import org.apache.hugegraph.backend.store.BackendStoreProvider; +import 
org.apache.hugegraph.backend.store.BackendTable; +import org.apache.hugegraph.backend.store.hstore.HstoreSessions.Session; +import org.apache.hugegraph.config.CoreOptions; +import org.apache.hugegraph.config.HugeConfig; +import org.apache.hugegraph.iterator.CIter; +import org.apache.hugegraph.type.HugeTableType; +import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.Action; +import org.apache.hugegraph.type.define.GraphMode; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Log; +import org.slf4j.Logger; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +public abstract class HstoreStore extends AbstractBackendStore { + + private static final Logger LOG = Log.logger(HstoreStore.class); + + private static final Set INDEX_TYPES = ImmutableSet.of( + HugeType.SECONDARY_INDEX, HugeType.VERTEX_LABEL_INDEX, + HugeType.EDGE_LABEL_INDEX, HugeType.RANGE_INT_INDEX, + HugeType.RANGE_FLOAT_INDEX, HugeType.RANGE_LONG_INDEX, + HugeType.RANGE_DOUBLE_INDEX, HugeType.SEARCH_INDEX, + HugeType.SHARD_INDEX, HugeType.UNIQUE_INDEX + ); + + private static final BackendFeatures FEATURES = new HstoreFeatures(); + private final String store, namespace; + + private final BackendStoreProvider provider; + private final Map tables; + private final ReadWriteLock storeLock; + private boolean isGraphStore; + private HstoreSessions sessions; + + public HstoreStore(final BackendStoreProvider provider, + String namespace, String store) { + this.tables = new HashMap<>(); + this.provider = provider; + this.namespace = namespace; + this.store = store; + this.sessions = null; + this.storeLock = new ReentrantReadWriteLock(); + this.registerMetaHandlers(); + LOG.debug("Store loaded: {}", store); + } + + private void registerMetaHandlers() { + this.registerMetaHandler("metrics", (session, meta, args) -> { + return ImmutableMap.of(); + }); + this.registerMetaHandler("mode", (session, meta, args) -> { + 
E.checkArgument(args.length == 1, + "The args count of %s must be 1", meta); + session.setMode((GraphMode) args[0]); + return null; + }); + } + + protected void registerTableManager(HugeTableType type, HstoreTable table) { + this.tables.put((int) type.code(), table); + } + + @Override + protected final HstoreTable table(HugeType type) { + assert type != null; + HugeTableType table; + switch (type) { + case VERTEX: + table = HugeTableType.VERTEX; + break; + case EDGE_OUT: + table = HugeTableType.OUT_EDGE; + break; + case EDGE_IN: + table = HugeTableType.IN_EDGE; + break; + case OLAP: + table = HugeTableType.OLAP_TABLE; + break; + case TASK: + table = HugeTableType.TASK_INFO_TABLE; + break; + case SERVER: + table = HugeTableType.SERVER_INFO_TABLE; + break; + case SEARCH_INDEX: + case SHARD_INDEX: + case SECONDARY_INDEX: + case RANGE_INT_INDEX: + case RANGE_LONG_INDEX: + case RANGE_FLOAT_INDEX: + case RANGE_DOUBLE_INDEX: + case EDGE_LABEL_INDEX: + case VERTEX_LABEL_INDEX: + case UNIQUE_INDEX: + table = HugeTableType.ALL_INDEX_TABLE; + break; + default: + throw new AssertionError(String.format( + "Invalid type: %s", type)); + } + return this.tables.get((int) table.code()); + } + + protected List tableNames() { + return this.tables.values().stream() + .map(BackendTable::table) + .collect(Collectors.toList()); + } + + @Override + protected Session session(HugeType type) { + this.checkOpened(); + return this.sessions.session(); + } + + public String namespace() { + return this.namespace; + } + + @Override + public String store() { + return this.store; + } + + @Override + public String database() { + return this.namespace; + } + + @Override + public BackendStoreProvider provider() { + return this.provider; + } + + @Override + public BackendFeatures features() { + return FEATURES; + } + + @Override + public synchronized void open(HugeConfig config) { + E.checkNotNull(config, "config"); + + if (this.sessions == null) { + this.sessions = new HstoreSessionsImpl(config, 
this.namespace, + this.store); + } + + String graphStore = config.get(CoreOptions.STORE_GRAPH); + this.isGraphStore = this.store.equals(graphStore); + assert this.sessions != null; + if (!this.sessions.closed()) { + LOG.debug("Store {} has been opened before", this.store); + this.sessions.useSession(); + return; + } + + try { + // NOTE: won't throw error even if connection refused + this.sessions.open(); + } catch (Exception e) { + LOG.error("Failed to open Hstore '{}':{}", this.store, e); + } + this.sessions.session(); + LOG.debug("Store opened: {}", this.store); + } + + @Override + public void close() { + this.checkOpened(); + this.sessions.close(); + + LOG.debug("Store closed: {}", this.store); + } + + @Override + public boolean opened() { + this.checkConnectionOpened(); + return this.sessions.session().opened(); + } + + @Override + public void mutate(BackendMutation mutation) { + Session session = this.sessions.session(); + assert session.opened(); + Map>> mutations = mutation.mutations(); + Set>>> entries = mutations.entrySet(); + for (Map.Entry>> entry : entries) { + HugeType key = entry.getKey(); + // in order to obtain the owner efficiently, special for edge + boolean isEdge = key.isEdge(); + HstoreTable hTable = this.table(key); + Map> table = entry.getValue(); + Collection> values = table.values(); + for (List items : values) { + for (int i = 0; i < items.size(); i++) { + BackendAction item = items.get(i); + // set to ArrayList, use index to get item + this.mutate(session, item, hTable, isEdge); + } + } + } + } + + private void mutate(Session session, BackendAction item, + HstoreTable hTable, boolean isEdge) { + BackendEntry entry = item.entry(); + HstoreTable table; + if (!entry.olap()) { + // Oltp table + table = hTable; + } else { + if (entry.type().isIndex()) { + // Olap index + table = this.table(entry.type()); + } else { + // Olap vertex + table = this.table(HugeType.OLAP); + } + session = this.session(HugeType.OLAP); + } + + if 
(item.action().code() == Action.INSERT.code()) { + table.insert(session, entry, isEdge); + } else { + if (item.action().code() == Action.APPEND.code()) { + table.append(session, entry); + } else { + switch (item.action()) { + case DELETE: + table.delete(session, entry); + break; + case ELIMINATE: + table.eliminate(session, entry); + break; + case UPDATE_IF_PRESENT: + table.updateIfPresent(session, entry); + break; + case UPDATE_IF_ABSENT: + table.updateIfAbsent(session, entry); + break; + default: + throw new AssertionError(String.format( + "Unsupported mutate action: %s", + item.action())); + } + } + } + } + + private HstoreTable getTableByQuery(Query query) { + HugeType tableType = HstoreTable.tableType(query); + HstoreTable table; + if (query.olap()) { + if (query.resultType().isIndex()) { + // Any index type is ok here + table = this.table(HugeType.SECONDARY_INDEX); + } else { + table = this.table(HugeType.OLAP); + } + } else { + table = this.table(tableType); + } + return table; + } + + @Override + public Iterator query(Query query) { + Lock readLock = this.storeLock.readLock(); + readLock.lock(); + try { + this.checkOpened(); + Session session = this.sessions.session(); + HstoreTable table = getTableByQuery(query); + Iterator entries = table.query(session, query); + // Merge olap results as needed + entries = getBackendEntryIterator(entries, query); + return entries; + } finally { + readLock.unlock(); + } + } + + + //@Override + //public Iterator> query(Iterator queries, + // Function queryWriter, + // HugeGraph hugeGraph) { + // if (queries == null || !queries.hasNext()) { + // return Collections.emptyIterator(); + // } + // + // class QueryWrapper implements Iterator { + // Query first; + // final Iterator queries; + // Iterator subEls; + // Query preQuery; + // Iterator queryListIterator; + // + // QueryWrapper(Iterator queries, Query first) { + // this.queries = queries; + // this.first = first; + // } + // + // @Override + // public boolean hasNext() { + 
// return first != null || (this.subEls != null && this.subEls.hasNext()) + // || (queryListIterator != null && queryListIterator.hasNext()) || + // queries.hasNext(); + // } + // + // @Override + // public IdPrefixQuery next() { + // if (queryListIterator != null && queryListIterator.hasNext()) { + // return queryListIterator.next(); + // } + // + // Query q; + // if (first != null) { + // q = first; + // preQuery = q.copy(); + // first = null; + // } else { + // if (this.subEls == null || !this.subEls.hasNext()) { + // q = queries.next(); + // preQuery = q.copy(); + // } else { + // q = preQuery.copy(); + // } + // } + // + // assert q instanceof ConditionQuery; + // ConditionQuery cq = (ConditionQuery) q; + // ConditionQuery originQuery = (ConditionQuery) q.copy(); + // + // List queryList = Lists.newArrayList(); + // if (hugeGraph != null) { + // for (ConditionQuery conditionQuery : + // ConditionQueryFlatten.flatten(cq)) { + // Id label = conditionQuery.condition(HugeKeys.LABEL); + // /* 父类型 + sortKeys: g.V("V.id").outE("parentLabel").has + // ("sortKey","value")转成 所有子类型 + sortKeys*/ + // if ((this.subEls == null || + // !this.subEls.hasNext()) && label != null && + // hugeGraph.edgeLabel(label).isFather() && + // conditionQuery.condition(HugeKeys.SUB_LABEL) == + // null && + // conditionQuery.condition(HugeKeys.OWNER_VERTEX) != + // null && + // conditionQuery.condition(HugeKeys.DIRECTION) != + // null && + // matchEdgeSortKeys(conditionQuery, false, + // hugeGraph)) { + // this.subEls = + // getSubLabelsOfParentEl( + // hugeGraph.edgeLabels(), + // label); + // } + // + // if (this.subEls != null && + // this.subEls.hasNext()) { + // conditionQuery.eq(HugeKeys.SUB_LABEL, + // subEls.next()); + // } + // + // HugeType hugeType = conditionQuery.resultType(); + // if (hugeType != null && hugeType.isEdge() && + // !conditionQuery.conditions().isEmpty()) { + // IdPrefixQuery idPrefixQuery = + // (IdPrefixQuery) queryWriter.apply( + // conditionQuery); + // 
idPrefixQuery.setOriginQuery(originQuery); + // queryList.add(idPrefixQuery); + // } + // } + // + // queryListIterator = queryList.iterator(); + // if (queryListIterator.hasNext()) { + // return queryListIterator.next(); + // } + // } + // + // Id ownerId = cq.condition(HugeKeys.OWNER_VERTEX); + // assert ownerId != null; + // BytesBuffer buffer = + // BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + // buffer.writeId(ownerId); + // return new IdPrefixQuery(cq, new BinaryBackendEntry.BinaryId( + // buffer.bytes(), ownerId)); + // } + // + // private boolean matchEdgeSortKeys(ConditionQuery query, + // boolean matchAll, + // HugeGraph graph) { + // assert query.resultType().isEdge(); + // Id label = query.condition(HugeKeys.LABEL); + // if (label == null) { + // return false; + // } + // List sortKeys = graph.edgeLabel(label).sortKeys(); + // if (sortKeys.isEmpty()) { + // return false; + // } + // Set queryKeys = query.userpropKeys(); + // for (int i = sortKeys.size(); i > 0; i--) { + // List subFields = sortKeys.subList(0, i); + // if (queryKeys.containsAll(subFields)) { + // if (queryKeys.size() == subFields.size() || !matchAll) { + // /* + // * Return true if: + // * matchAll=true and all queryKeys are in sortKeys + // * or + // * partial queryKeys are in sortKeys + // */ + // return true; + // } + // } + // } + // return false; + // } + // } + // Query first = queries.next(); + // List typeList = getHugeTypes(first); + // QueryWrapper idPrefixQueries = new QueryWrapper(queries, first); + // + // return query(typeList, idPrefixQueries); + //} + + //private Iterator getSubLabelsOfParentEl(Collection allEls, + // Id label) { + // List list = new ArrayList<>(); + // for (EdgeLabel el : allEls) { + // if (el.edgeLabelType().sub() && el.fatherId().equals(label)) { + // list.add(el.id()); + // } + // } + // return list.iterator(); + //} + + public List> query(List typeList, + List queries) { + Lock readLock = this.storeLock.readLock(); + readLock.lock(); + 
LinkedList> results = new LinkedList<>(); + try { + this.checkOpened(); + Session session = this.sessions.session(); + E.checkState(!CollectionUtils.isEmpty(queries) && + !CollectionUtils.isEmpty(typeList), + "Please check query list or type list."); + HstoreTable table = null; + StringBuilder builder = new StringBuilder(); + for (HugeType type : typeList) { + builder.append((table = this.table(type)).table()).append(","); + } + List> iteratorList = + table.query(session, queries, + builder.substring(0, builder.length() - 1)); + for (int i = 0; i < iteratorList.size(); i++) { + Iterator entries = iteratorList.get(i); + // Merge olap results as needed + Query query = queries.get(i); + entries = getBackendEntryIterator(entries, query); + if (entries instanceof CIter) { + results.add((CIter) entries); + } + } + return results; + } finally { + readLock.unlock(); + } + } + + public Iterator> query(List typeList, + Iterator queries) { + Lock readLock = this.storeLock.readLock(); + readLock.lock(); + try { + this.checkOpened(); + Session session = this.sessions.session(); + E.checkState(queries.hasNext() && + !CollectionUtils.isEmpty(typeList), + "Please check query list or type list."); + HstoreTable table = null; + StringBuilder builder = new StringBuilder(); + for (HugeType type : typeList) { + builder.append((table = this.table(type)).table()).append(","); + } + + Iterator> iterators = + table.query(session, queries, + builder.substring(0, builder.length() - 1)); + + return iterators; + } finally { + readLock.unlock(); + } + } + + private Iterator getBackendEntryIterator( + Iterator entries, + Query query) { + //HstoreTable table; + //Set olapPks = query.olapPks(); + //if (this.isGraphStore && !olapPks.isEmpty()) { + // List> iterators = new ArrayList<>(); + // for (Id pk : olapPks) { + // // 构造olap表查询query condition + // Query q = this.constructOlapQueryCondition(pk, query); + // table = this.table(HugeType.OLAP); + // 
iterators.add(table.queryOlap(this.session(HugeType.OLAP), q)); + // } + // entries = new MergeIterator<>(entries, iterators, + // BackendEntry::mergable); + //} + return entries; + } + + + /** + * 重新构造 查询olap表 query + * 由于 olap合并成一张表, 在写入olap数据, key在后面增加了pk + * 所以在此进行查询的时候,需要重新构造pk前缀 + * 写入参考 BinarySerializer.writeOlapVertex + * + * @param pk + * @param query + * @return + */ + private Query constructOlapQueryCondition(Id pk, Query query) { + if (query instanceof IdQuery && !CollectionUtils.isEmpty((query).ids())) { + IdQuery q = (IdQuery) query.copy(); + Iterator iterator = q.ids().iterator(); + LinkedHashSet linkedHashSet = new LinkedHashSet<>(); + while (iterator.hasNext()) { + Id id = iterator.next(); + if (id instanceof BinaryBackendEntry.BinaryId) { + id = ((BinaryBackendEntry.BinaryId) id).origin(); + } + + // create binary id + BytesBuffer buffer = + BytesBuffer.allocate(1 + pk.length() + 1 + id.length()); + buffer.writeId(pk); + id = new BinaryBackendEntry.BinaryId( + buffer.writeId(id).bytes(), id); + linkedHashSet.add(id); + } + q.resetIds(); + q.query(linkedHashSet); + return q; + } else { + // create binary id + BytesBuffer buffer = BytesBuffer.allocate(1 + pk.length()); + pk = new BinaryBackendEntry.BinaryId( + buffer.writeId(pk).bytes(), pk); + + IdPrefixQuery idPrefixQuery = new IdPrefixQuery(HugeType.OLAP, pk); + return idPrefixQuery; + } + } + + @Override + public Number queryNumber(Query query) { + this.checkOpened(); + + Session session = this.sessions.session(); + HstoreTable table = this.table(HstoreTable.tableType(query)); + return table.queryNumber(session, query); + } + + @Override + public synchronized void init() { + Lock writeLock = this.storeLock.writeLock(); + writeLock.lock(); + try { + // Create tables with main disk + this.sessions.createTable(this.tableNames().toArray(new String[0])); + LOG.debug("Store initialized: {}", this.store); + } finally { + writeLock.unlock(); + } + } + + @Override + public void clear(boolean clearSpace) 
{ + Lock writeLock = this.storeLock.writeLock(); + writeLock.lock(); + try { + // Drop tables with main disk + this.sessions.dropTable(this.tableNames().toArray(new String[0])); + if (clearSpace) { + this.sessions.clear(); + } + LOG.debug("Store cleared: {}", this.store); + } finally { + writeLock.unlock(); + } + } + + @Override + public boolean initialized() { + return true; + } + + @Override + public void truncate() { + try { + this.sessions.session().truncate(); + } catch (Exception e) { + LOG.error("Store truncated failed", e); + return; + } + LOG.debug("Store truncated: {}", this.store); + } + + @Override + public void beginTx() { + this.sessions.session().beginTx(); + } + + @Override + public void commitTx() { + this.checkOpened(); + Session session = this.sessions.session(); + session.commit(); + } + + @Override + public void rollbackTx() { + this.checkOpened(); + Session session = this.sessions.session(); + session.rollback(); + } + + private void checkConnectionOpened() { + } + + @Override + public Id nextId(HugeType type) { + long counter = 0L; + counter = this.getCounter(type); + E.checkState(counter != 0L, "Please check whether '%s' is OK", + this.provider().type()); + return IdGenerator.of(counter); + } + + @Override + public void setCounterLowest(HugeType type, long lowest) { + this.increaseCounter(type, lowest); + } + + /***************************** Store defines *****************************/ + + public static class HstoreSchemaStore extends HstoreStore { + + public HstoreSchemaStore(BackendStoreProvider provider, String namespace, String store) { + super(provider, namespace, store); + } + + @Override + public boolean isSchemaStore() { + return true; + } + + @Override + public void increaseCounter(HugeType type, long num) { + throw new UnsupportedOperationException( + "HstoreSchemaStore.increaseCounter()"); + } + + @Override + public long getCounter(HugeType type) { + throw new UnsupportedOperationException( + "HstoreSchemaStore.getCounter()"); + } 
+ } + + public static class HstoreGraphStore extends HstoreStore { + + public HstoreGraphStore(BackendStoreProvider provider, + String namespace, String store) { + super(provider, namespace, store); + + registerTableManager(HugeTableType.VERTEX, + new HstoreTables.Vertex(store)); + registerTableManager(HugeTableType.OUT_EDGE, + HstoreTables.Edge.out(store)); + registerTableManager(HugeTableType.IN_EDGE, + HstoreTables.Edge.in(store)); + registerTableManager(HugeTableType.ALL_INDEX_TABLE, + new HstoreTables.IndexTable(store)); + registerTableManager(HugeTableType.OLAP_TABLE, + new HstoreTables.OlapTable(store)); + registerTableManager(HugeTableType.TASK_INFO_TABLE, + new HstoreTables.TaskInfo(store)); + registerTableManager(HugeTableType.SERVER_INFO_TABLE, + new HstoreTables.ServerInfo(store)); + } + + @Override + public boolean isSchemaStore() { + return false; + } + + @Override + public Id nextId(HugeType type) { + throw new UnsupportedOperationException( + "HstoreGraphStore.nextId()"); + } + + @Override + public void increaseCounter(HugeType type, long num) { + throw new UnsupportedOperationException( + "HstoreGraphStore.increaseCounter()"); + } + + @Override + public long getCounter(HugeType type) { + throw new UnsupportedOperationException( + "HstoreGraphStore.getCounter()"); + } + + @Override + public void createOlapTable(Id pkId) { + HstoreTable table = new HstoreTables.OlapTable(this.store()); + LOG.info("Hstore create olap table {}", table.table()); + super.sessions.createTable(table.table()); + LOG.info("Hstore finish create olap table"); + registerTableManager(HugeTableType.OLAP_TABLE, table); + LOG.info("OLAP table {} has been created", table.table()); + } + + @Override + public void checkAndRegisterOlapTable(Id pkId) { + HstoreTable table = new HstoreTables.OlapTable(this.store()); + if (!super.sessions.existsTable(table.table())) { + LOG.error("Found exception: Table '{}' doesn't exist, we'll " + + "recreate it now. 
Please carefully check the recent" + + "operation in server and computer, then ensure the " + + "integrity of store file.", table.table()); + this.createOlapTable(pkId); + } else { + registerTableManager(HugeTableType.OLAP_TABLE, table); + } + } + + @Override + public void clearOlapTable(Id pkId) { + } + + @Override + public void removeOlapTable(Id pkId) { + } + } + + @Override + public String storedVersion() { + return "1.13"; + } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreTable.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreTable.java new file mode 100755 index 0000000000..7abe92c0f8 --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreTable.java @@ -0,0 +1,747 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hugegraph.backend.store.hstore; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.function.Supplier; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.hugegraph.backend.id.EdgeId; +import org.apache.hugegraph.backend.id.Id; +import org.apache.hugegraph.backend.page.PageState; +import org.apache.hugegraph.backend.query.Aggregate; +import org.apache.hugegraph.backend.query.Aggregate.AggregateFunc; +import org.apache.hugegraph.backend.query.Condition; +import org.apache.hugegraph.backend.query.Condition.Relation; +import org.apache.hugegraph.backend.query.ConditionQuery; +import org.apache.hugegraph.backend.query.IdPrefixQuery; +import org.apache.hugegraph.backend.query.IdRangeQuery; +import org.apache.hugegraph.backend.query.Query; +import org.apache.hugegraph.backend.serializer.BinaryBackendEntry; +import org.apache.hugegraph.backend.serializer.BinaryEntryIterator; +import org.apache.hugegraph.backend.store.BackendEntry; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumn; +import org.apache.hugegraph.backend.store.BackendEntry.BackendColumnIterator; +import org.apache.hugegraph.backend.store.BackendEntryIterator; +import org.apache.hugegraph.backend.store.BackendTable; +import org.apache.hugegraph.backend.store.Shard; +import org.apache.hugegraph.backend.store.hstore.HstoreSessions.Countable; +import org.apache.hugegraph.backend.store.hstore.HstoreSessions.Session; +import org.apache.hugegraph.exception.NotSupportException; +import org.apache.hugegraph.pd.client.PDClient; +import org.apache.hugegraph.pd.common.PDException; +import org.apache.hugegraph.pd.grpc.Metapb; +import org.apache.hugegraph.store.HgOwnerKey; +import org.apache.hugegraph.store.client.util.HgStoreClientConst; 
+import org.apache.hugegraph.type.HugeType; +import org.apache.hugegraph.type.define.HugeKeys; +import org.apache.hugegraph.util.E; +import org.apache.hugegraph.util.Log; +import org.apache.hugegraph.util.StringEncoding; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; +import org.slf4j.Logger; + +public class HstoreTable extends BackendTable { + + private static final Logger LOG = Log.logger(HstoreStore.class); + + private final HstoreShardSplitter shardSpliter; + Function ownerDelegate = (entry) -> getOwner(entry); + Function ownerByIdDelegate = (id) -> getOwnerId(id); + BiFunction ownerByQueryDelegate = + (type, id) -> getOwnerId(type, id); + Supplier ownerScanDelegate = + () -> HgStoreClientConst.ALL_PARTITION_OWNER; + + public HstoreTable(String database, String table) { + super(String.format("%s+%s", database, table)); + this.shardSpliter = new HstoreShardSplitter(this.table()); + } + + public static ConditionQuery removeDirectionCondition(ConditionQuery conditionQuery) { + Collection conditions = conditionQuery.conditions(); + List newConditions = new ArrayList<>(); + for (Condition condition : conditions) { + if (!direction(condition)) { + newConditions.add(condition); + } + } + if (newConditions.size() > 0) { + conditionQuery.resetConditions(newConditions); + return conditionQuery; + } else { + return null; + } + } + + private static boolean direction(Condition condition) { + boolean direction = true; + List relations = condition.relations(); + for (Relation r : relations) { + if (!r.key().equals(HugeKeys.DIRECTION)) { + direction = false; + break; + } + } + return direction; + } + + protected static BackendEntryIterator newEntryIterator( + BackendColumnIterator cols, Query query) { + return new BinaryEntryIterator<>(cols, query, (entry, col) -> { + if (entry == null || !entry.belongToMe(col)) { + HugeType type = query.resultType(); + // NOTE: only support BinaryBackendEntry currently + entry = new BinaryBackendEntry(type, col.name); + } + 
entry.columns(col); + return entry; + }); + } + + protected static BackendEntryIterator newEntryIteratorOlap( + BackendColumnIterator cols, Query query, boolean isOlap) { + return new BinaryEntryIterator<>(cols, query, (entry, col) -> { + if (entry == null || !entry.belongToMe(col)) { + HugeType type = query.resultType(); + // NOTE: only support BinaryBackendEntry currently + entry = new BinaryBackendEntry(type, col.name, isOlap); + } + entry.columns(col); + return entry; + }); + } + + public static String bytes2String(byte[] bytes) { + StringBuilder result = new StringBuilder(); + for (byte b : bytes) { + String st = String.format("%02x", b); + result.append(st); + } + return result.toString(); + } + + @Override + protected void registerMetaHandlers() { + this.registerMetaHandler("splits", (session, meta, args) -> { + E.checkArgument(args.length == 1, + "The args count of %s must be 1", meta); + long splitSize = (long) args[0]; + return this.shardSpliter.getSplits(session, splitSize); + }); + } + + @Override + public void init(Session session) { + // pass + } + + @Override + public void clear(Session session) { + // pass + } + + public boolean isOlap() { + return false; + } + + private byte[] getOwner(BackendEntry entry) { + if (entry == null) { + return HgStoreClientConst.ALL_PARTITION_OWNER; + } + Id id = entry.type().isIndex() ? 
entry.id() : entry.originId(); + return getOwnerId(id); + } + // protected byte[] getInsertOwnerId(BackendEntry entry) { + // BinaryBackendEntry.BinaryId currentId = (BinaryBackendEntry.BinaryId) entry.id(); + // Id origin = currentId.origin(); + // if (origin.edge()) { + // origin = ((EdgeId) origin).ownerVertexId(); + // return origin.asBytes(); + // } else { + // return currentId.asBytes(); + // } + // } + + public Supplier getOwnerScanDelegate() { + return ownerScanDelegate; + } + + public byte[] getInsertEdgeOwner(BackendEntry entry) { + Id id = entry.originId(); + id = ((EdgeId) id).ownerVertexId(); + return id.asBytes(); + } + + public byte[] getInsertOwner(BackendEntry entry) { + // 为适应label索引散列,不聚焦在一个分区 + if (entry.type().isLabelIndex() && (entry.columns().size() == 1)) { + Iterator iterator = entry.columns().iterator(); + while (iterator.hasNext()) { + BackendColumn next = iterator.next(); + return next.name; + } + } + + Id id = entry.type().isIndex() ? entry.id() : entry.originId(); + return getOwnerId(id); + } + + /** + * 返回Id所属的点ID + * + * @param id + * @return + */ + protected byte[] getOwnerId(Id id) { + if (id instanceof BinaryBackendEntry.BinaryId) { + id = ((BinaryBackendEntry.BinaryId) id).origin(); + } + if (id != null && id.edge()) { + id = ((EdgeId) id).ownerVertexId(); + } + return id != null ? id.asBytes() : + HgStoreClientConst.ALL_PARTITION_OWNER; + } + + /** + * 返回Id所属的点ID + * + * @param id + * @return + */ + protected byte[] getOwnerId(HugeType type, Id id) { + if (type.equals(HugeType.VERTEX) || type.equals(HugeType.EDGE) || + type.equals(HugeType.EDGE_OUT) || type.equals(HugeType.EDGE_IN) || + type.equals(HugeType.COUNTER)) { + return getOwnerId(id); + } else { + return HgStoreClientConst.ALL_PARTITION_OWNER; + } + } + + @Override + public void insert(Session session, BackendEntry entry) { + byte[] owner = entry.type().isEdge() ? 
getInsertEdgeOwner(entry) : getInsertOwner(entry); + ArrayList columns = new ArrayList<>(entry.columns()); + for (int i = 0; i < columns.size(); i++) { + BackendColumn col = columns.get(i); + session.put(this.table(), owner, col.name, col.value); + } + } + + public void insert(Session session, BackendEntry entry, boolean isEdge) { + byte[] owner = isEdge ? getInsertEdgeOwner(entry) : getInsertOwner(entry); + ArrayList columns = new ArrayList<>(entry.columns()); + for (int i = 0; i < columns.size(); i++) { + BackendColumn col = columns.get(i); + session.put(this.table(), owner, col.name, col.value); + } + } + + @Override + public void delete(Session session, BackendEntry entry) { + byte[] ownerKey = ownerDelegate.apply(entry); + if (entry.columns().isEmpty()) { + byte[] idBytes = entry.id().asBytes(); + // LOG.debug("Delete from {} with owner {}, id: {}", + // this.table(), bytes2String(ownerKey), idBytes); + session.delete(this.table(), ownerKey, idBytes); + } else { + for (BackendColumn col : entry.columns()) { + // LOG.debug("Delete from {} with owner {}, id: {}", + // this.table(), bytes2String(ownerKey), + // bytes2String(col.name)); + assert entry.belongToMe(col) : entry; + session.delete(this.table(), ownerKey, col.name); + } + } + } + + @Override + public void append(Session session, BackendEntry entry) { + assert entry.columns().size() == 1; + this.insert(session, entry); + } + + @Override + public void eliminate(Session session, BackendEntry entry) { + assert entry.columns().size() == 1; + this.delete(session, entry); + } + + @Override + public boolean queryExist(Session session, BackendEntry entry) { + Id id = entry.id(); + try (BackendColumnIterator iter = this.queryById(session, id)) { + return iter.hasNext(); + } + } + + @Override + public Number queryNumber(Session session, Query query) { + Aggregate aggregate = query.aggregateNotNull(); + if (aggregate.func() != AggregateFunc.COUNT) { + throw new NotSupportException(aggregate.toString()); + } + + 
assert aggregate.func() == AggregateFunc.COUNT; + assert query.noLimit(); + Iterator results = this.queryBy(session, query); + if (results instanceof Countable) { + return ((Countable) results).count(); + } + return IteratorUtils.count(results); + } + + @Override + public Iterator query(Session session, Query query) { + if (query.limit() == 0L && !query.noLimit()) { + // LOG.debug("Return empty result(limit=0) for query {}", query); + return Collections.emptyIterator(); + } + return newEntryIterator(this.queryBy(session, query), query); + } + + //@Override + //public Iterator queryOlap(Session session, Query query) { + // if (query.limit() == 0L && !query.noLimit()) { + // // LOG.debug("Return empty result(limit=0) for query {}", query); + // return Collections.emptyIterator(); + // } + // return newEntryIteratorOlap(this.queryBy(session, query), query, true); + //} + + public List> query(Session session, + List queries, + String tableName) { + List queryByPrefixList = + this.queryByPrefixList(session, queries, tableName); + LinkedList> iterators = new LinkedList<>(); + for (int i = 0; i < queryByPrefixList.size(); i++) { + IdPrefixQuery q = queries.get(i).copy(); + q.capacity(Query.NO_CAPACITY); + q.limit(Query.NO_LIMIT); + BackendEntryIterator iterator = + newEntryIterator(queryByPrefixList.get(i), q); + iterators.add(iterator); + } + return iterators; + } + + public BackendEntry.BackendIterator> query(Session session, + Iterator queries, + String tableName) { + //final IdPrefixQuery[] first = {queries.next()}; + //int type = first[0].withProperties() ? 0 : Session.SCAN_KEY_ONLY; + // + //IdPrefixQuery queryTmpl = first[0].copy(); + //queryTmpl.capacity(Query.NO_CAPACITY); + //queryTmpl.limit(Query.NO_LIMIT); + // + //ConditionQuery originQuery = (ConditionQuery) first[0].originQuery(); + //if (originQuery != null) { + // originQuery = prepareConditionQueryList(originQuery); + //} + //byte[] queryBytes = originQuery == null ? 
null : originQuery.bytes(); + // + //BackendEntry.BackendIterator it + // = session.scan(tableName, new Iterator() { + // @Override + // public boolean hasNext() { + // if (first[0] != null) { + // return true; + // } + // return queries.hasNext(); + // } + // + // @Override + // public HgOwnerKey next() { + // IdPrefixQuery query = first[0] != null ? first[0] : queries.next(); + // first[0] = null; + // byte[] prefix = ownerByQueryDelegate.apply(query.resultType(), + // query.prefix()); + // return HgOwnerKey.of(prefix, query.prefix().asBytes()); + // } + //}, type, first[0], queryBytes); + //return new BackendEntry.BackendIterator<>() { + // @Override + // public boolean hasNext() { + // return it.hasNext(); + // } + // + // @Override + // public Iterator next() { + // BackendEntryIterator iterator = newEntryIterator(it.next(), queryTmpl); + // return iterator; + // } + // + // @Override + // public void close() { + // it.close(); + // } + // + // @Override + // public byte[] position() { + // return new byte[0]; + // } + //}; + return null; + } + + protected BackendColumnIterator queryBy(Session session, Query query) { + // Query all + if (query.empty()) { + return this.queryAll(session, query); + } + + // Query by prefix + if (query instanceof IdPrefixQuery) { + IdPrefixQuery pq = (IdPrefixQuery) query; + return this.queryByPrefix(session, pq); + } + + // Query by range + if (query instanceof IdRangeQuery) { + IdRangeQuery rq = (IdRangeQuery) query; + return this.queryByRange(session, rq); + } + + // Query by id + if (query.conditions().isEmpty()) { + assert !query.ids().isEmpty(); + // 单个id查询 走get接口查询 + if (query.ids().size() == 1) { + return this.getById(session, query.ids().iterator().next()); + } + // NOTE: this will lead to lazy create rocksdb iterator + LinkedList hgOwnerKeys = new LinkedList<>(); + for (Id id : query.ids()) { + hgOwnerKeys.add(HgOwnerKey.of(this.ownerByIdDelegate.apply(id), + id.asBytes())); + } + BackendColumnIterator withBatch = 
session.getWithBatch(this.table(), + hgOwnerKeys); + return BackendColumnIterator.wrap(withBatch); + } + + // Query by condition (or condition + id) + ConditionQuery cq = (ConditionQuery) query; + return this.queryByCond(session, cq); + } + + protected BackendColumnIterator queryAll(Session session, Query query) { + if (query.paging()) { + PageState page = PageState.fromString(query.page()); + byte[] ownerKey = this.getOwnerScanDelegate().get(); + //int scanType = Session.SCAN_ANY | + // (query.withProperties() ? 0 : Session.SCAN_KEY_ONLY); + int scanType = 0; + byte[] queryBytes = query instanceof ConditionQuery ? + ((ConditionQuery) query).bytes() : null; + // LOG.debug("query {} with ownerKeyFrom: {}, ownerKeyTo: {}, " + + // "keyFrom: null, keyTo: null, scanType: {}, " + + // "conditionQuery: {}, position: {}", + // this.table(), bytes2String(ownerKey), + // bytes2String(ownerKey), scanType, + // queryBytes, page.position()); + return session.scan(this.table(), ownerKey, ownerKey, null, + null, scanType, queryBytes, + page.position()); + } + return session.scan(this.table(), + query instanceof ConditionQuery ? + ((ConditionQuery) query).bytes() : null); + } + + protected BackendColumnIterator queryById(Session session, Id id) { + // TODO: change to get() after vertex and schema don't use id prefix + return session.scan(this.table(), this.ownerByIdDelegate.apply(id), + id.asBytes()); + } + + protected BackendColumnIterator getById(Session session, Id id) { + byte[] value = session.get(this.table(), + this.ownerByIdDelegate.apply(id), + id.asBytes()); + if (value.length == 0) { + return BackendColumnIterator.empty(); + } + BackendColumn col = BackendColumn.of(id.asBytes(), value); + return BackendColumnIterator.iterator(col); + } + + protected BackendColumnIterator queryByPrefix(Session session, + IdPrefixQuery query) { + int type = query.inclusiveStart() ? 
+ Session.SCAN_GTE_BEGIN : Session.SCAN_GT_BEGIN; + type |= Session.SCAN_PREFIX_END; + byte[] position = null; + if (query.paging()) { + position = PageState.fromString(query.page()).position(); + } + ConditionQuery originQuery = (ConditionQuery) query.originQuery(); + if (originQuery != null) { + originQuery = prepareConditionQuery(originQuery); + } + byte[] ownerKeyFrom = this.ownerByQueryDelegate.apply(query.resultType(), + query.start()); + byte[] ownerKeyTo = this.ownerByQueryDelegate.apply(query.resultType(), + query.prefix()); + byte[] keyFrom = query.start().asBytes(); + // 前缀分页查询中, start为最初的位置。因为在不同的分区 都是从start位置开始查询 + if (query.paging()) { + keyFrom = query.prefix().asBytes(); + } + byte[] keyTo = query.prefix().asBytes(); + byte[] queryBytes = originQuery == null ? + null : + originQuery.bytes(); + + // LOG.debug("query {} with ownerKeyFrom: {}, ownerKeyTo: {}," + + // "keyFrom: {}, keyTo: {}, scanType: {}, conditionQuery: {}," + + // "position: {}", + // this.table(), bytes2String(ownerKeyFrom), + // bytes2String(ownerKeyTo), bytes2String(keyFrom), + // bytes2String(keyTo), type, originQuery, position); + + return session.scan(this.table(), ownerKeyFrom, ownerKeyTo, keyFrom, + keyTo, type, queryBytes, position); + } + + protected List queryByPrefixList( + Session session, + List queries, + String tableName) { + //E.checkArgument(queries.size() > 0, + // "The size of queries must be greater than zero"); + //IdPrefixQuery query = queries.get(0); + //int type = 0; + //LinkedList ownerKey = new LinkedList<>(); + //queries.forEach((item) -> { + // byte[] prefix = this.ownerByQueryDelegate.apply(item.resultType(), + // item.prefix()); + // ownerKey.add(HgOwnerKey.of(prefix, item.prefix().asBytes())); + //}); + //ConditionQuery originQuery = (ConditionQuery) query.originQuery(); + //if (originQuery != null) { + // originQuery = prepareConditionQueryList(originQuery); + //} + //byte[] queryBytes = originQuery == null ? 
null : originQuery.bytes(); + // + //// LOG.debug("query {} with scanType: {}, limit: {}, conditionQuery: + //// {}", this.table(), type, query.limit(), queryBytes); + //return session.scan(tableName, ownerKey, type, + // query.limit(), queryBytes); + return null; + } + + /*** + * Prepare ConditionQuery to do operator sinking, because some scenes do not need to be + * preserved + * @param conditionQuery + * @return + */ + private ConditionQuery prepareConditionQuery(ConditionQuery conditionQuery) { + if (CollectionUtils.isEmpty(conditionQuery.userpropConditions())) { + return null; + } + // only userpropConditions can send to store + Collection conditions = conditionQuery.conditions(); + List newConditions = new ArrayList<>(); + for (Condition condition : conditions) { + if (!onlyOwnerVertex(condition)) { + newConditions.add(condition); + } + } + if (newConditions.size() > 0) { + conditionQuery.resetConditions(newConditions); + return conditionQuery; + } else { + return null; + } + } + + /*** + * Prepare ConditionQuery to do operator sinking, because some scenes do not need to be + * preserved + * @param conditionQuery + * @return + */ + private ConditionQuery prepareConditionQueryList(ConditionQuery conditionQuery) { + //if (!conditionQuery.containsLabelOrUserpropRelation()) { + // return null; + //} + //// only userpropConditions can send to store + //Collection conditions = conditionQuery.conditions(); + //List newConditions = new ArrayList<>(); + //for (Condition condition : conditions) { + // if (!onlyOwnerVertex(condition)) { + // newConditions.add(condition); + // } + //} + //if (newConditions.size() > 0) { + // conditionQuery.resetConditions(newConditions); + // return conditionQuery; + //} else { + // return null; + //} + return null; + } + + private boolean onlyOwnerVertex(Condition condition) { + boolean onlyOwnerVertex = true; + List relations = condition.relations(); + for (Relation r : relations) { + if (!r.key().equals(HugeKeys.OWNER_VERTEX)) { + 
onlyOwnerVertex = false; + break; + } + } + return onlyOwnerVertex; + } + + protected BackendColumnIterator queryByRange(Session session, + IdRangeQuery query) { + byte[] start = query.start().asBytes(); + byte[] end = query.end() == null ? null : query.end().asBytes(); + int type = query.inclusiveStart() ? + Session.SCAN_GTE_BEGIN : Session.SCAN_GT_BEGIN; + if (end != null) { + type |= query.inclusiveEnd() ? + Session.SCAN_LTE_END : Session.SCAN_LT_END; + } + ConditionQuery cq; + Query origin = query.originQuery(); + byte[] position = null; + if (query.paging() && !query.page().isEmpty()) { + position = PageState.fromString(query.page()).position(); + } + byte[] ownerStart = this.ownerByQueryDelegate.apply(query.resultType(), + query.start()); + byte[] ownerEnd = this.ownerByQueryDelegate.apply(query.resultType(), + query.end()); + if (origin instanceof ConditionQuery && + (query.resultType().isEdge() || query.resultType().isVertex())) { + cq = (ConditionQuery) query.originQuery(); + + // LOG.debug("query {} with ownerKeyFrom: {}, ownerKeyTo: {}, " + + // "keyFrom: {}, keyTo: {}, " + + // "scanType: {}, conditionQuery: {}", + // this.table(), bytes2String(ownerStart), + // bytes2String(ownerEnd), bytes2String(start), + // bytes2String(end), type, cq.bytes()); + //return session.scan(this.table(), ownerStart, + // ownerEnd, start, end, type, cq.bytes(), position); + return null; + } + return session.scan(this.table(), ownerStart, + ownerEnd, start, end, type, null, position); + } + + protected BackendColumnIterator queryByCond(Session session, + ConditionQuery query) { + //if (query.containsScanCondition()) { + // E.checkArgument(query.relations().size() == 1, + // "Invalid scan with multi conditions: %s", query); + // Relation scan = query.relations().iterator().next(); + // Shard shard = (Shard) scan.value(); + // return this.queryByRange(session, shard, query); + //} + // throw new NotSupportException("query: %s", query); + return this.queryAll(session, query); 
+ } + + protected BackendColumnIterator queryByRange(Session session, Shard shard, + ConditionQuery query) { + //int type = Session.SCAN_GTE_BEGIN; + //type |= Session.SCAN_LT_END; + //type |= Session.SCAN_HASHCODE; + //type |= query.withProperties() ? 0 : Session.SCAN_KEY_ONLY; + // + //int start = Integer.parseInt(StringUtils.isEmpty(shard.start()) ? + // "0" : shard.start()); + //int end = Integer.parseInt(StringUtils.isEmpty(shard.end()) ? + // "0" : shard.end()); + //byte[] queryBytes = query.bytes(); + //String page = query.page(); + //if (page != null && !page.isEmpty()) { + // byte[] position = PageState.fromString(page).position(); + // return session.scan(this.table(), start, end, type, queryBytes, + // position); + //} + //return session.scan(this.table(), start, end, type, queryBytes); + return null; + } + + private static class HstoreShardSplitter extends ShardSplitter { + + public HstoreShardSplitter(String table) { + super(table); + } + + @Override + public List getSplits(Session session, long splitSize) { + E.checkArgument(splitSize >= MIN_SHARD_SIZE, + "The split-size must be >= %s bytes, but got %s", + MIN_SHARD_SIZE, splitSize); + + List splits = new ArrayList<>(); + try { + PDClient pdClient = HstoreSessionsImpl.getDefaultPdClient(); + List partitions = pdClient.getPartitions(0, + session.getGraphName()); + for (Metapb.Partition partition : partitions) { + String start = String.valueOf(partition.getStartKey()); + String end = String.valueOf(partition.getEndKey()); + splits.add(new Shard(start, end, 0)); + } + } catch (PDException e) { + e.printStackTrace(); + } + + return splits.size() != 0 ? 
+ splits : super.getSplits(session, splitSize); + } + + @Override + public long estimateDataSize(Session session) { + return 1L; + } + + @Override + public long estimateNumKeys(Session session) { + return 1L; + } + + @Override + public byte[] position(String position) { + if (END.equals(position)) { + return null; + } + return StringEncoding.decodeBase64(position); + } + } +} diff --git a/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreTables.java b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreTables.java new file mode 100644 index 0000000000..f4ebf7ebef --- /dev/null +++ b/hugegraph-server/hugegraph-hstore/src/main/java/org/apache/hugegraph/backend/store/hstore/HstoreTables.java @@ -0,0 +1,214 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
package org.apache.hugegraph.backend.store.hstore;

import java.util.List;

import org.apache.hugegraph.backend.id.Id;
import org.apache.hugegraph.backend.query.Condition;
import org.apache.hugegraph.backend.query.Condition.Relation;
import org.apache.hugegraph.backend.query.ConditionQuery;
import org.apache.hugegraph.backend.serializer.BinarySerializer;
import org.apache.hugegraph.backend.store.BackendEntry;
import org.apache.hugegraph.backend.store.BackendEntry.BackendColumnIterator;
import org.apache.hugegraph.backend.store.hstore.HstoreSessions.Session;
import org.apache.hugegraph.type.HugeTableType;
import org.apache.hugegraph.type.HugeType;
import org.apache.hugegraph.type.define.HugeKeys;
import org.apache.hugegraph.util.E;

/**
 * Concrete hstore table definitions, one nested class per logical table
 * (vertex, in/out edge, index, task info, server info, olap).
 */
public class HstoreTables {

    /**
     * Vertex table: ids are complete keys, so lookup by id uses a point
     * get instead of a prefix scan.
     */
    public static class Vertex extends HstoreTable {

        public static final String TABLE = HugeTableType.VERTEX.string();

        public Vertex(String database) {
            super(database, TABLE);
        }

        @Override
        protected BackendColumnIterator queryById(Session session, Id id) {
            return this.getById(session, id);
        }
    }

    /**
     * Table storing task info.
     */
    public static class TaskInfo extends HstoreTable {

        public static final String TABLE = HugeTableType.TASK_INFO_TABLE.string();

        public TaskInfo(String database) {
            super(database, TABLE);
        }

        @Override
        protected BackendColumnIterator queryById(Session session, Id id) {
            return this.getById(session, id);
        }
    }

    /**
     * Table storing server info.
     */
    public static class ServerInfo extends HstoreTable {

        public static final String TABLE = HugeTableType.SERVER_INFO_TABLE.string();

        public ServerInfo(String database) {
            super(database, TABLE);
        }

        @Override
        protected BackendColumnIterator queryById(Session session, Id id) {
            return this.getById(session, id);
        }
    }

    /**
     * Edge table, materialized twice: once keyed by the out-vertex and
     * once keyed by the in-vertex.
     */
    public static class Edge extends HstoreTable {

        public static final String TABLE_SUFFIX = HugeType.EDGE.string();

        public Edge(boolean out, String database) {
            // Edge out/in table
            super(database, (out ? HugeTableType.OUT_EDGE.string() :
                                   HugeTableType.IN_EDGE.string()));
        }

        public static Edge out(String database) {
            return new Edge(true, database);
        }

        public static Edge in(String database) {
            return new Edge(false, database);
        }

        @Override
        protected BackendColumnIterator queryById(Session session, Id id) {
            return this.getById(session, id);
        }
    }

    /**
     * Unified index table (all index types share one physical table).
     */
    public static class IndexTable extends HstoreTable {

        public static final String TABLE = HugeTableType.ALL_INDEX_TABLE.string();

        public IndexTable(String database) {
            super(database, TABLE);
        }

        @Override
        public void eliminate(Session session, BackendEntry entry) {
            assert entry.columns().size() == 1;
            super.delete(session, entry);
        }

        @Override
        public void delete(Session session, BackendEntry entry) {
            /*
             * Only delete index by label will come here
             * Regular index delete will call eliminate()
             */
            byte[] ownerKey = super.ownerDelegate.apply(entry);
            for (BackendEntry.BackendColumn column : entry.columns()) {
                // Don't assert entry.belongToMe(column), length-prefix is 1*
                session.deletePrefix(this.table(), ownerKey, column.name);
            }
        }

        /**
         * Mainly used for queries on range-type indexes.
         *
         * Translates the PREFIX/GT(E)/LT(E) conditions on the ID sysprop
         * into a key-range scan over the index table.
         *
         * @param session the store session to scan with
         * @param query   condition query carrying the index range conditions
         * @return iterator over the matching index columns
         */
        @Override
        protected BackendColumnIterator queryByCond(Session session,
                                                    ConditionQuery query) {
            assert !query.conditions().isEmpty();

            List<Condition> conds = query.syspropConditions(HugeKeys.ID);
            E.checkArgument(!conds.isEmpty(),
                            "Please specify the index conditions");

            Id prefix = null;
            Id min = null;
            boolean minEq = false;
            Id max = null;
            boolean maxEq = false;

            for (Condition c : conds) {
                Relation r = (Relation) c;
                switch (r.relation()) {
                    case PREFIX:
                        prefix = (Id) r.value();
                        break;
                    case GTE:
                        minEq = true;
                        // fall through
                    case GT:
                        min = (Id) r.value();
                        break;
                    case LTE:
                        maxEq = true;
                        // fall through
                    case LT:
                        max = (Id) r.value();
                        break;
                    default:
                        E.checkArgument(false, "Unsupported relation '%s'",
                                        r.relation());
                }
            }

            E.checkArgumentNotNull(min, "Range index begin key is missing");
            byte[] begin = min.asBytes();
            if (!minEq) {
                // Exclusive lower bound: bump the key to the next value
                BinarySerializer.increaseOne(begin);
            }
            byte[] ownerStart = this.ownerScanDelegate.get();
            byte[] ownerEnd = this.ownerScanDelegate.get();
            if (max == null) {
                E.checkArgumentNotNull(prefix, "Range index prefix is missing");
                return session.scan(this.table(), ownerStart, ownerEnd, begin,
                                    prefix.asBytes(), Session.SCAN_PREFIX_END);
            } else {
                byte[] end = max.asBytes();
                int type = maxEq ? Session.SCAN_LTE_END : Session.SCAN_LT_END;
                return session.scan(this.table(), ownerStart,
                                    ownerEnd, begin, end, type);
            }
        }
    }

    /**
     * OLAP property table.
     */
    public static class OlapTable extends HstoreTable {

        public static final String TABLE = HugeTableType.OLAP_TABLE.string();

        public OlapTable(String database) {
            // The former per-pk ap_{pk_id} tables are merged into one ap table
            super(database, TABLE);
        }

        @Override
        protected BackendColumnIterator queryById(Session session, Id id) {
            return this.getById(session, id);
        }

        @Override
        public boolean isOlap() {
            return true;
        }
    }
}
package org.apache.hugegraph.backend.store.hstore.fake;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import org.apache.hugegraph.backend.store.hstore.HstoreSessions;
import org.apache.hugegraph.pd.grpc.Pdpb;

/**
 * Base class for id allocation clients; concrete subclasses decide where
 * ids actually come from (e.g. the PD service or a local counter table).
 */
public abstract class IdClient {

    protected HstoreSessions.Session session;
    protected String table;

    public IdClient(HstoreSessions.Session session, String table) {
        this.session = session;
        this.table = table;
    }

    /**
     * Encode a long counter value as native-byte-order bytes.
     */
    protected static byte[] b(long value) {
        ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
        buffer.order(ByteOrder.nativeOrder());
        return buffer.putLong(value).array();
    }

    /**
     * Decode bytes produced by {@link #b(long)} back into a long.
     */
    protected static long l(byte[] bytes) {
        assert bytes.length == Long.BYTES;
        ByteBuffer buffer = ByteBuffer.wrap(bytes);
        buffer.order(ByteOrder.nativeOrder());
        return buffer.getLong();
    }

    public abstract Pdpb.GetIdResponse getIdByKey(String key, int delta)
                                                  throws Exception;

    public abstract Pdpb.ResetIdResponse resetIdByKey(String key) throws Exception;

    public abstract void increaseId(String key, long increment)
                         throws Exception;
}
agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package org.apache.hugegraph.backend.store.hstore.fake; + +import org.apache.hugegraph.backend.store.hstore.HstoreSessions; +import org.apache.hugegraph.backend.store.hstore.HstoreSessionsImpl; +import org.apache.hugegraph.pd.client.PDClient; +import org.apache.hugegraph.pd.grpc.Pdpb; + +public class PDIdClient extends IdClient { + + PDClient pdClient; + + public PDIdClient(HstoreSessions.Session session, String table) { + super(session, table); + pdClient = HstoreSessionsImpl.getDefaultPdClient(); + } + + @Override + public Pdpb.GetIdResponse getIdByKey(String key, int delta) throws Exception { + return pdClient.getIdByKey(key, delta); + } + + @Override + public Pdpb.ResetIdResponse resetIdByKey(String key) throws Exception { + return pdClient.resetIdByKey(key); + } + + @Override + public void increaseId(String key, long increment) throws Exception { + pdClient.getIdByKey(key, (int) increment); + } +} diff --git a/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java b/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java index bb99cdbd58..99f8cabda4 100644 --- 
a/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java +++ b/hugegraph-server/hugegraph-mysql/src/main/java/org/apache/hugegraph/backend/store/mysql/MysqlStore.java @@ -349,7 +349,7 @@ protected Collection tables() { @Override protected final MysqlTable table(HugeType type) { assert type != null; - MysqlTable table = this.tables.get(type); + MysqlTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table type: %s", type); } diff --git a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java index 2dba5fa766..1d0cdba7b6 100644 --- a/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java +++ b/hugegraph-server/hugegraph-rocksdb/src/main/java/org/apache/hugegraph/backend/store/rocksdb/RocksDBStore.java @@ -151,7 +151,7 @@ protected void unregisterTableManager(String name) { @Override protected final RocksDBTable table(HugeType type) { - RocksDBTable table = this.tables.get(type); + RocksDBTable table = this.tables.get(convertTaskOrServerToVertex(type)); if (table == null) { throw new BackendException("Unsupported table: '%s'", type); } diff --git a/hugegraph-server/pom.xml b/hugegraph-server/pom.xml index 44a0e58edc..7a6ee74a8e 100644 --- a/hugegraph-server/pom.xml +++ b/hugegraph-server/pom.xml @@ -129,6 +129,7 @@ hugegraph-palo hugegraph-hbase hugegraph-postgresql + hugegraph-hstore @@ -297,19 +298,16 @@ io.grpc grpc-netty ${grpc.version} - provided io.grpc grpc-protobuf ${grpc.version} - provided io.grpc grpc-stub ${grpc.version} - provided com.google.protobuf diff --git a/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreNodeSessionImpl.java 
b/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreNodeSessionImpl.java index afa831573b..180d3357f7 100644 --- a/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreNodeSessionImpl.java +++ b/hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/grpc/GrpcStoreNodeSessionImpl.java @@ -70,6 +70,7 @@ class GrpcStoreNodeSessionImpl implements HgStoreNodeSession { put("g+index", 4); put("g+task", 5); put("g+olap", 6); + put("g+server", 7); }}; private final HgStoreNode storeNode; private final String graphName; diff --git a/hugegraph-store/hg-store-core/pom.xml b/hugegraph-store/hg-store-core/pom.xml index e8232672b2..4c3ae2269b 100644 --- a/hugegraph-store/hg-store-core/pom.xml +++ b/hugegraph-store/hg-store-core/pom.xml @@ -104,22 +104,17 @@ ${revision} compile - org.apache.hugegraph - hg-store-common + hugegraph-core ${revision} - org.apache.hugegraph - hugegraph-core - ${hugegraph.core.version} - system - - ${top.level.dir}/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.jar - + hg-store-common + ${revision} + org.apache.tinkerpop diff --git a/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/AbstractSelectIterator.java b/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/AbstractSelectIterator.java index eb9928aa59..6972b41266 100644 --- a/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/AbstractSelectIterator.java +++ b/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/AbstractSelectIterator.java @@ -48,7 +48,7 @@ public HugeElement parseEntry(BackendEntry entry, boolean isVertex) { return this.serializer.readVertex(null, entry); } else { CIter itr = - this.serializer.readEdges(null, entry, true, false); + this.serializer.readEdges(null, entry); // Iterator itr = this.serializer.readEdges( // null, entry, true, 
false).iterator(); diff --git a/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/BusinessHandler.java b/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/BusinessHandler.java index b8d51184d8..62ded85ece 100644 --- a/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/BusinessHandler.java +++ b/hugegraph-store/hg-store-core/src/main/java/org/apache/hugegraph/store/business/BusinessHandler.java @@ -50,9 +50,10 @@ public interface BusinessHandler extends DBSessionBuilder { String tableIndex = "g+index"; String tableTask = "g+task"; String tableOlap = "g+olap"; + String tableServer = "g+server"; String[] tables = new String[]{tableUnknown, tableVertex, tableOutEdge, tableInEdge, tableIndex, - tableTask, tableOlap}; + tableTask, tableOlap, tableServer}; void doPut(String graph, int code, String table, byte[] key, byte[] value) throws HgStoreException; diff --git a/hugegraph-store/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.jar b/hugegraph-store/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.jar deleted file mode 100644 index 72a4b20519..0000000000 Binary files a/hugegraph-store/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.jar and /dev/null differ diff --git a/hugegraph-store/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.pom b/hugegraph-store/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.pom deleted file mode 100644 index 90d828a15b..0000000000 --- a/hugegraph-store/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.pom +++ /dev/null @@ -1,441 +0,0 @@ - - - - 4.0.0 - - org.apache.hugegraph - hugegraph - 1.5.0 - - org.apache.hugegraph - hugegraph-core - 1.5.0 - - - Apache License, Version 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - ${basedir}/.. 
- 5.6.0 - - - - org.apache.hugegraph - hugegraph-common - 1.0.1 - - - jakarta.activation - jakarta.activation-api - - - jakarta.xml.bind - jakarta.xml.bind-api - - - org.glassfish.jersey.core - jersey-client - - - org.apache.commons - commons-text - - - com.google.errorprone - error_prone_annotations - - - org.glassfish.jersey.core - jersey-common - - - - - org.apache.hugegraph - hg-pd-client - ${pdclient.version} - - - org.projectlombok - lombok - - - com.google.errorprone - error_prone_annotations - - - - - io.netty - netty-buffer - 4.1.52.Final - - - org.apache.tinkerpop - gremlin-core - - - org.yaml - snakeyaml - - - org.apache.commons - commons-lang3 - - - org.apache.commons - commons-configuration2 - - - org.apache.commons - commons-text - - - - - org.apache.tinkerpop - gremlin-groovy - - - com.github.jeremyh - jBCrypt - - - org.apache.commons - commons-lang3 - - - - - org.apache.tinkerpop - tinkergraph-gremlin - - - org.apache.commons - commons-lang3 - - - - - org.apache.tinkerpop - gremlin-test - - - org.apache.tinkerpop - gremlin-driver - - - org.apache.commons - commons-lang3 - - - io.netty - netty-all - - - - - com.google.protobuf - protobuf-java - 3.17.3 - - - org.caffinitas.ohc - ohc-core - 0.7.0 - - - com.google.guava - guava - - - - - com.github.ben-manes.caffeine - caffeine - - - org.apdplat - word - 1.3 - - - ch.qos.logback - logback-classic - - - - - org.ansj - ansj_seg - 5.1.6 - - - com.hankcs - hanlp - portable-1.5.0 - - - org.apache.lucene - lucene-analyzers-smartcn - 7.4.0 - - - org.apache.lucene - lucene-core - 7.4.0 - - - com.huaban - jieba-analysis - 1.0.2 - - - org.apache.commons - commons-lang3 - - - - - org.lionsoul - jcseg-core - 2.2.0 - - - com.chenlb.mmseg4j - mmseg4j-core - 1.10.0 - - - com.janeluo - ikanalyzer - 2012_u6 - - - org.lz4 - lz4-java - 1.7.1 - - - org.apache.commons - commons-compress - 1.20 - - - org.eclipse.collections - eclipse-collections-api - 10.4.0 - - - org.eclipse.collections - eclipse-collections - 10.4.0 - - - 
it.unimi.dsi - fastutil - 8.1.0 - - - io.jsonwebtoken - jjwt-api - 0.11.2 - - - io.jsonwebtoken - jjwt-impl - 0.11.2 - runtime - - - io.jsonwebtoken - jjwt-jackson - 0.11.2 - runtime - - - com.fasterxml.jackson.core - jackson-databind - - - - - io.etcd - jetcd-core - 0.5.9 - - - io.netty - netty-handler - - - com.google.errorprone - error_prone_annotations - - - - - com.google.code.gson - gson - 2.8.9 - - - io.netty - netty-tcnative-boringssl-static - 2.0.26.Final - runtime - - - io.netty - netty-handler - 4.1.52.Final - - - io.netty - netty-all - 4.1.52.Final - - - io.fabric8 - kubernetes-client - ${fabric8.version} - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-databind - - - - - org.apache.hugegraph - computer-k8s - 1.0.0 - - - org.apache.hugegraph - hugegraph-common - - - org.apache.hugegraph - hugegraph-client - - - - - org.apache.commons - commons-text - 1.10.0 - - - org.apache.kafka - kafka-clients - 3.4.0 - - - io.dropwizard.metrics - metrics-core - - - org.glassfish.jersey.core - jersey-client - ${jersey.version} - - - org.glassfish.hk2 - hk2-locator - - - - - org.glassfish.hk2 - hk2-locator - 3.0.1 - - - org.apache.hugegraph - hg-store-common - ${hgstore-common.version} - - - - - - true - ${basedir}/src/main/resources - - - false - src/main/jni - - - - - ${basedir}/src/test/resources - - - ${basedir}/target - - - maven-clean-plugin - 3.0.0 - - - - ${top.level.dir} - - *.tar.gz - - false - - - ${final.name} - - - - - - maven-jar-plugin - 3.0.2 - - - true - - false - true - - - 3.6.3.0 - - - - - - - diff --git a/hugegraph-store/hg-store-node/pom.xml b/hugegraph-store/hg-store-node/pom.xml index 22bffb49c1..ec0edf4824 100644 --- a/hugegraph-store/hg-store-node/pom.xml +++ b/hugegraph-store/hg-store-node/pom.xml @@ -121,17 +121,6 @@ org.apache.hugegraph hg-store-core - - - org.apache.hugegraph - hugegraph-core - ${hugegraph.core.version} - system - - 
${top.level.dir}/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.jar - - - com.taobao.arthas diff --git a/hugegraph-store/hg-store-test/pom.xml b/hugegraph-store/hg-store-test/pom.xml index 31ec720766..d66a36b62b 100644 --- a/hugegraph-store/hg-store-test/pom.xml +++ b/hugegraph-store/hg-store-test/pom.xml @@ -210,16 +210,6 @@ ${revision} compile - - - org.apache.hugegraph - hugegraph-core - ${hugegraph.core.version} - system - - ${top.level.dir}/hg-store-dist/src/assembly/ext-lib/hugegraph-core-1.5.0.jar - - diff --git a/pom.xml b/pom.xml index 5af212fd61..acbb9f6770 100644 --- a/pom.xml +++ b/pom.xml @@ -93,8 +93,11 @@ + hugegraph-pd + hugegraph-store hugegraph-server + stage