From 534507dc0a8adff90cfbb1d0ffeb072730a9585d Mon Sep 17 00:00:00 2001 From: imbajin Date: Tue, 19 Jul 2022 01:58:32 +0800 Subject: [PATCH 1/9] feature(client): read & write bypass server init step --- .../baidu/hugegraph/example/BatchExample.java | 2 +- .../serializer/direct/BinaryEntry.java | 256 ++++ .../serializer/direct/RocksDBSerializer.java | 405 ++++++ .../serializer/direct/backup/HugeEdge.java | 492 +++++++ .../serializer/direct/backup/HugeElement.java | 410 ++++++ .../serializer/direct/backup/HugeVertex.java | 386 +++++ .../direct/backup/OldSerializer.java | 1294 +++++++++++++++++ .../serializer/direct/reuse/BytesDemo.java | 199 +++ .../serializer/direct/struct/DataType.java | 205 +++ .../direct/struct/ElementBuilder.java | 625 ++++++++ .../serializer/direct/struct/HugeType.java | 190 +++ .../serializer/direct/util/BytesBuffer.java | 909 ++++++++++++ .../serializer/direct/util/DataTypeUtil.java | 332 +++++ .../serializer/direct/util/EdgeId.java | 242 +++ .../serializer/direct/util/GraphSchema.java | 105 ++ .../serializer/direct/util/HugeException.java | 56 + .../hugegraph/serializer/direct/util/Id.java | 70 + .../direct/util/SplicingIdGenerator.java | 134 ++ .../direct/util/StringEncoding.java | 166 +++ .../hugegraph/structure/GraphElement.java | 4 + .../structure/constant/Direction.java | 4 +- .../structure/constant/HugeType.java | 4 +- .../baidu/hugegraph/structure/graph/Edge.java | 1 + .../hugegraph/structure/graph/Vertex.java | 1 + .../hugegraph/loader/builder/EdgeBuilder.java | 4 +- .../loader/flink/HugeGraphOutputFormat.java | 5 +- .../hugegraph/loader/util/DataTypeUtil.java | 2 +- 27 files changed, 6492 insertions(+), 11 deletions(-) create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java create mode 100644 
hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/DataType.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/ElementBuilder.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/HugeType.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/GraphSchema.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/HugeException.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/example/BatchExample.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/example/BatchExample.java index 94f59e5e2..8f2da31fc 100644 --- 
a/hugegraph-client/src/main/java/com/baidu/hugegraph/example/BatchExample.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/example/BatchExample.java @@ -31,7 +31,7 @@ public class BatchExample { public static void main(String[] args) { - // If connect failed will throw a exception. + // If connect failed will throw an exception. HugeClient hugeClient = HugeClient.builder("http://localhost:8080", "hugegraph").build(); diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java new file mode 100644 index 000000000..5232f686e --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java @@ -0,0 +1,256 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.serializer.direct; + +import static com.baidu.hugegraph.serializer.direct.util.StringEncoding.decode; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; + +import com.baidu.hugegraph.serializer.direct.struct.HugeType; +import com.baidu.hugegraph.serializer.direct.util.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.util.Bytes; +import com.baidu.hugegraph.util.E; + +public class BinaryEntry { + + private final HugeType type; + private final BinaryId id; + private Id subId; + private final List columns; + + public static final long COMMIT_BATCH = 500L; + + public BinaryEntry(HugeType type, byte[] bytes) { + this(type, BytesBuffer.wrap(bytes).parseId(type, false)); + } + + public BinaryEntry(HugeType type, byte[] bytes, boolean enablePartition) { + this(type, BytesBuffer.wrap(bytes).parseId(type, enablePartition)); + } + + public BinaryEntry(HugeType type, BinaryId id) { + this.type = type; + this.id = id; + this.subId = null; + this.columns = new ArrayList<>(); + } + + public HugeType type() { + return this.type; + } + + public BinaryId id() { + return this.id; + } + + public Id originId() { + return this.id.origin(); + } + + public Id subId() { + return this.subId; + } + + public void subId(Id subId) { + this.subId = subId; + } + + @Override + public String toString() { + return String.format("%s: %s", this.id, this.columns.toString()); + } + + public BackendColumn column(byte[] name) { + for (BackendColumn col : this.columns) { + if (Bytes.equals(col.name, name)) { + return col; + } + } + return null; + } + + public void column(BackendColumn column) { + this.columns.add(column); + } + + public void column(byte[] name, byte[] value) { + E.checkNotNull(name, "name"); + value = value != null ? 
value : BytesBuffer.BYTES_EMPTY; + this.columns.add(BackendColumn.of(name, value)); + } + + public Collection columns() { + return Collections.unmodifiableList(this.columns); + } + + public int columnsSize() { + return this.columns.size(); + } + + public void columns(Collection bytesColumns) { + this.columns.addAll(bytesColumns); + } + + public void columns(BackendColumn bytesColumn) { + this.columns.add(bytesColumn); + long maxSize = COMMIT_BATCH; + if (this.columns.size() > maxSize) { + E.checkState(false, "Too many columns in one entry: %s", maxSize); + } + } + + public BackendColumn removeColumn(int index) { + return this.columns.remove(index); + } + + public void clear() { + this.columns.clear(); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof BinaryEntry)) { + return false; + } + BinaryEntry other = (BinaryEntry) obj; + if (this.id() != other.id() && !this.id().equals(other.id())) { + return false; + } + if (this.columns.size() != other.columns.size()) { + return false; + } + return new HashSet<>(this.columns).containsAll(other.columns); + } + + @Override + public int hashCode() { + return this.id().hashCode() ^ this.columns.size(); + } + + protected static final class BinaryId implements Id { + + private final byte[] bytes; + private final Id id; + + public BinaryId(byte[] bytes, Id id) { + this.bytes = bytes; + this.id = id; + } + + public Id origin() { + return this.id; + } + + @Override + public byte[] asBytes() { + return this.bytes; + } + + @Override + public IdType type() { + return IdType.UNKNOWN; + } + + @Override + public boolean edge() { + return Id.super.edge(); + } + + public byte[] asBytes(int offset) { + E.checkArgument(offset < this.bytes.length, + "Invalid offset %s, must be < length %s", + offset, this.bytes.length); + return Arrays.copyOfRange(this.bytes, offset, this.bytes.length); + } + + @Override + public int length() { + return this.bytes.length; + } + + @Override + public int hashCode() { + return 
ByteBuffer.wrap(this.bytes).hashCode(); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof BinaryId)) { + return false; + } + return Arrays.equals(this.bytes, ((BinaryId) other).bytes); + } + + @Override + public String toString() { + return "0x" + Bytes.toHex(this.bytes); + } + } + + static class BackendColumn implements Comparable { + + public byte[] name; + public byte[] value; + + public static BackendColumn of(byte[] name, byte[] value) { + BackendColumn col = new BackendColumn(); + col.name = name; + col.value = value; + return col; + } + + @Override + public String toString() { + return String.format("%s=%s", decode(name), decode(value)); + } + + @Override + public int compareTo(BackendColumn other) { + if (other == null) { + return 1; + } + return Bytes.compare(this.name, other.name); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof BackendColumn)) { + return false; + } + BackendColumn other = (BackendColumn) obj; + return Bytes.equals(this.name, other.name) && + Bytes.equals(this.value, other.value); + } + + @Override + public int hashCode() { + return Arrays.hashCode(this.name) ^ + Arrays.hashCode(this.value); + } + + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java new file mode 100644 index 000000000..0ead419c6 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java @@ -0,0 +1,405 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. 
The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.serializer.direct; + +import static com.baidu.hugegraph.serializer.direct.BinaryEntry.BackendColumn; +import static com.baidu.hugegraph.serializer.direct.BinaryEntry.BinaryId; +import static com.baidu.hugegraph.serializer.direct.struct.HugeType.EDGE; +import static com.baidu.hugegraph.serializer.direct.struct.HugeType.VERTEX; +import static com.baidu.hugegraph.structure.graph.Graph.HugeEdge; +import static com.baidu.hugegraph.structure.graph.Graph.HugeVertex; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.id.IdGenerator; +import com.baidu.hugegraph.driver.HugeClient; +import com.baidu.hugegraph.serializer.direct.struct.HugeElement; +import com.baidu.hugegraph.serializer.direct.struct.HugeType; +import com.baidu.hugegraph.serializer.direct.util.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.util.EdgeId; +import com.baidu.hugegraph.serializer.direct.util.GraphSchema; +import com.baidu.hugegraph.serializer.direct.util.HugeException; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.serializer.direct.util.StringEncoding; +import com.baidu.hugegraph.structure.GraphElement; +import com.baidu.hugegraph.structure.HugeProperty; +import com.baidu.hugegraph.structure.SchemaElement; 
+import com.baidu.hugegraph.structure.constant.Cardinality; +import com.baidu.hugegraph.structure.graph.Edge; +import com.baidu.hugegraph.structure.graph.Vertex; +import com.baidu.hugegraph.structure.schema.EdgeLabel; +import com.baidu.hugegraph.structure.schema.PropertyKey; +import com.baidu.hugegraph.structure.schema.VertexLabel; +import com.baidu.hugegraph.type.define.HugeKeys; +import com.baidu.hugegraph.util.Bytes; +import com.baidu.hugegraph.util.E; + +/** + * In this serializer, we only support normal type now: + * - number + * - string + * And they will be transferred to bytes directly + **/ +public class RocksDBSerializer { + + /* + * ID is stored in column name if keyWithIdPrefix=true like RocksDB, stored in rowkey for HBase + */ + private final boolean keyWithIdPrefix; + private final boolean indexWithIdPrefix; + private final boolean enablePartition; + GraphSchema schema; + + public RocksDBSerializer(HugeClient client) { + this(true, true, false, client); + } + + public RocksDBSerializer(boolean keyWithIdPrefix, boolean indexWithIdPrefix, + boolean enablePartition, HugeClient client) { + // only consider rocksdb now + this.keyWithIdPrefix = true; + this.indexWithIdPrefix = true; + this.enablePartition = false; + this.schema = new GraphSchema(client); + } + + protected BinaryEntry newBackendEntry(HugeType type, Id id) { + if (type == VERTEX) { + BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + id.length()); + writePartitionedId(VERTEX, id, buffer); + return new BinaryEntry(type, buffer.bytes()); + } + + if (type == EDGE) { + return new BinaryEntry(type, (BinaryId) id); + } + + BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); + byte[] idBytes = buffer.writeId(id).bytes(); + return new BinaryEntry(type, new BinaryId(idBytes, id)); + } + + protected final BinaryEntry newBackendEntry(Vertex vertex) { + return newBackendEntry(VERTEX, vertex.id()); + } + + protected final BinaryEntry newBackendEntry(Edge edge) { + BinaryId id = 
writeEdgeId(edge.idWithDirection()); + return newBackendEntry(EDGE, id); + } + + protected final BinaryEntry newBackendEntry(SchemaElement elem) { + return newBackendEntry(elem.type(), elem.id()); + } + + protected BackendColumn formatLabel(GraphElement elem) { + BackendColumn col = new BackendColumn(); + col.name = this.formatSyspropName(elem.id(), HugeKeys.LABEL); + Id label = elem.schemaLabel().id(); + BytesBuffer buffer = BytesBuffer.allocate(label.length() + 1); + col.value = buffer.writeId(label).bytes(); + return col; + } + + protected byte[] formatPropertyName(HugeProperty prop) { + Id id = prop.element().id(); + int idLen = this.keyWithIdPrefix ? 1 + id.length() : 0; + Id pkeyId = prop.propertyKey().id(); + BytesBuffer buffer = BytesBuffer.allocate(idLen + 2 + pkeyId.length()); + if (this.keyWithIdPrefix) { + buffer.writeId(id); + } + buffer.write(prop.type().code()); + buffer.writeId(pkeyId); + return buffer.bytes(); + } + + protected BackendColumn formatProperty(HugeProperty prop) { + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_PROPERTY); + buffer.writeProperty(prop.propertyKey(), prop.value()); + return BackendColumn.of(this.formatPropertyName(prop), buffer.bytes()); + } + + protected void parseProperty(Id pkeyId, BytesBuffer buffer, GraphElement owner) { + PropertyKey pkey = owner.graph().propertyKey(pkeyId); + + // Parse value + Object value = buffer.readProperty(pkey); + + // Set properties of vertex/edge + if (pkey.cardinality() == Cardinality.SINGLE) { + owner.addProperty(pkey, value); + } else { + if (!(value instanceof Collection)) { + throw new HugeException("Invalid value of non-single property: %s", value); + } + owner.addProperty(pkey, value); + } + } + + protected void formatProperties(Map props, + BytesBuffer buffer) { + // Write properties size + buffer.writeVInt(props.size()); + + // Write properties data + for (Map.Entry kv : props.entrySet()) { + String key = kv.getKey(); + Object value = kv.getValue(); + PropertyKey pkey 
= + } + + for (HugeProperty property : props) { + PropertyKey pkey = property.propertyKey(); + buffer.writeVInt(SchemaElement.schemaId(pkey.id())); + buffer.writeProperty(pkey, property.value()); + } + } + + protected void parseProperties(BytesBuffer buffer, HugeElement owner) { + int size = buffer.readVInt(); + assert size >= 0; + for (int i = 0; i < size; i++) { + Id pkeyId = IdGenerator.of(buffer.readVInt()); + this.parseProperty(pkeyId, buffer, owner); + } + } + + protected byte[] formatEdgeValue(Edge edge) { + int propsCount = edge.sizeOfProperties(); + BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount); + + // Write edge id + //buffer.writeId(edge.id()); + + // Write edge properties + this.formatProperties(edge.properties(), buffer); + return buffer.bytes(); + } + + protected void parseEdge(BackendColumn col, HugeVertex vertex) { + // owner-vertex + dir + edge-label + sort-values + other-vertex + + BytesBuffer buffer = BytesBuffer.wrap(col.name); + if (this.keyWithIdPrefix) { + // Consume owner-vertex id + buffer.readId(); + } + byte type = buffer.read(); + Id labelId = buffer.readId(); + String sortValues = buffer.readStringWithEnding(); + Id otherVertexId = buffer.readId(); + + boolean direction = EdgeId.isOutDirectionFromCode(type); + EdgeLabel edgeLabel = schema.getEdgeLabel(labelId); + + // Construct edge + HugeEdge edge = HugeEdge.constructEdge(vertex, direction, edgeLabel, + sortValues, otherVertexId); + + // Parse edge-id + edge-properties + buffer = BytesBuffer.wrap(col.value); + + //Id id = buffer.readId(); + + // Parse edge properties + this.parseProperties(buffer, edge); + } + + protected void parseVertex(byte[] value, HugeVertex vertex) { + BytesBuffer buffer = BytesBuffer.wrap(value); + + // Parse vertex label + VertexLabel label = schema.getVertexLabel(buffer.readId()); + vertex.correctVertexLabel(label); + + // Parse properties + this.parseProperties(buffer, vertex); + } + + protected void parseColumn(BackendColumn col, Vertex 
vertex) { + BytesBuffer buffer = BytesBuffer.wrap(col.name); + Id id = this.keyWithIdPrefix ? buffer.readId() : vertex.id(); + E.checkState(buffer.remaining() > 0, "Missing column type"); + byte type = buffer.read(); + // Parse property + if (type == HugeType.PROPERTY.code()) { + Id pkeyId = buffer.readId(); + this.parseProperty(pkeyId, BytesBuffer.wrap(col.value), vertex); + } + // Parse edge + else if (type == HugeType.EDGE_IN.code() || + type == HugeType.EDGE_OUT.code()) { + this.parseEdge(col, vertex); + } else { + E.checkState(false, "Invalid entry(%s) with unknown type(%s): 0x%s", + id, type & 0xff, Bytes.toHex(col.name)); + } + } + + public BinaryEntry writeVertex(Vertex vertex) { + BinaryEntry entry = newBackendEntry(vertex); + + int propsCount = vertex.sizeOfProperties(); + BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount); + + // Write vertex label + buffer.writeId(vertex.schemaLabel().id()); + + // Write all properties of the vertex + this.formatProperties(vertex.properties(), buffer); + + // Fill column + byte[] name = this.keyWithIdPrefix ? entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; + entry.column(name, buffer.bytes()); + + return entry; + } + + public HugeVertex readVertex(BinaryEntry bytesEntry) { + if (bytesEntry == null) { + return null; + } + BinaryEntry entry = this.convertEntry(bytesEntry); + + // Parse id + Id id = entry.id().origin(); + Id vid = id.edge() ? 
((EdgeId) id).ownerVertexId() : id; + HugeVertex vertex = new HugeVertex(vid, VertexLabel.NONE); + + // Parse all properties and edges of a Vertex + Iterator iterator = entry.columns().iterator(); + for (int index = 0; iterator.hasNext(); index++) { + BackendColumn col = iterator.next(); + if (entry.type().isEdge()) { + // NOTE: the entry id type is vertex even if entry type is edge + // Parse vertex edges + this.parseColumn(col, vertex); + } else { + assert entry.type().isVertex(); + // Parse vertex properties + assert entry.columnsSize() >= 1 : entry.columnsSize(); + if (index == 0) { + this.parseVertex(col.value, vertex); + } else { + this.parseVertexOlap(col.value, vertex); + } + } + } + + return vertex; + } + + protected void parseVertexOlap(byte[] value, HugeVertex vertex) { + BytesBuffer buffer = BytesBuffer.wrap(value); + Id pkeyId = IdGenerator.of(buffer.readVInt()); + this.parseProperty(pkeyId, buffer, vertex); + } + + public BinaryEntry writeEdge(HugeEdge edge) { + BinaryEntry entry = newBackendEntry(edge); + byte[] name = this.keyWithIdPrefix ? 
+ entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; + byte[] value = this.formatEdgeValue(edge); + entry.column(name, value); + + return entry; + } + + public HugeEdge readEdge(BinaryEntry bytesEntry) { + HugeVertex vertex = this.readVertex(bytesEntry); + Collection edges = vertex.getEdges(); + if (edges.size() != 1) { + E.checkState(false, "Expect 1 edge in vertex, but got %s", edges.size()); + } + return edges.iterator().next(); + } + + public BinaryEntry writeId(HugeType type, Id id) { + return newBackendEntry(type, id); + } + + private BinaryId writeEdgeId(Id id) { + EdgeId edgeId; + if (id instanceof EdgeId) { + edgeId = (EdgeId) id; + } else { + edgeId = EdgeId.parse(id.asString()); + } + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + if (this.enablePartition) { + buffer.writeShort(getPartition(HugeType.EDGE, edgeId.ownerVertexId())); + buffer.writeEdgeId(edgeId); + } else { + buffer.writeEdgeId(edgeId); + } + return new BinaryId(buffer.bytes(), id); + } + + private void writePartitionedId(HugeType type, Id id, BytesBuffer buffer) { + if (this.enablePartition) { + buffer.writeShort(getPartition(type, id)); + buffer.writeId(id); + } else { + buffer.writeId(id); + } + } + + protected short getPartition(HugeType type, Id id) { + return 0; + } + + public BinaryEntry parse(BinaryEntry originEntry) { + byte[] bytes = originEntry.id().asBytes(); + BinaryEntry parsedEntry = new BinaryEntry(originEntry.type(), bytes, this.enablePartition); + + if (this.enablePartition) { + bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length() + 2, bytes.length); + } else { + bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length(), bytes.length); + } + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + buffer.write(parsedEntry.id().asBytes()); + buffer.write(bytes); + parsedEntry = new BinaryEntry(originEntry.type(), + new BinaryId(buffer.bytes(), BytesBuffer.wrap(buffer.bytes()).readEdgeId())); + + for (BackendColumn col : 
originEntry.columns()) { + parsedEntry.column(buffer.bytes(), col.value); + } + return parsedEntry; + } + + + protected static boolean indexIdLengthExceedLimit(Id id) { + return id.asBytes().length > BytesBuffer.INDEX_HASH_ID_THRESHOLD; + } + + protected static boolean indexFieldValuesUnmatched(byte[] value, Object fieldValues) { + if (value != null && value.length > 0 && fieldValues != null) { + return !StringEncoding.decode(value).equals(fieldValues); + } + return false; + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java new file mode 100644 index 000000000..798a85eae --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java @@ -0,0 +1,492 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.serializer.direct.struct; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import org.apache.logging.log4j.util.Strings; +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.StringFactory; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.id.EdgeId; +import com.baidu.hugegraph.backend.query.ConditionQuery; +import com.baidu.hugegraph.backend.query.QueryResults; +import com.baidu.hugegraph.perf.PerfUtil.Watched; +import com.baidu.hugegraph.schema.EdgeLabel; +import com.baidu.hugegraph.schema.PropertyKey; +import com.baidu.hugegraph.schema.VertexLabel; +import com.baidu.hugegraph.serializer.direct.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.serializer.direct.util.HugeException; +import com.baidu.hugegraph.type.HugeType; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.type.define.HugeKeys; +import com.baidu.hugegraph.util.E; +import com.google.common.collect.ImmutableList; + +public class HugeEdge extends HugeElement implements Edge, Cloneable { + + private Id id; + private String name; + + private HugeVertex sourceVertex; + private HugeVertex targetVertex; + private boolean isOutEdge; + + public HugeEdge(HugeVertex owner, Id id, EdgeLabel label, + HugeVertex other) { + this(owner.graph(), id, label); + this.fresh(true); + this.vertices(owner, other); + } + + public HugeEdge(final HugeGraph graph, Id id, String label) { + super(graph); + + E.checkArgumentNotNull(label, "Edge label can't be null"); + this.label = label; + + this.id = id; + this.name = null; + this.sourceVertex = null; + this.targetVertex = null; + this.isOutEdge = true; + } + + @Override + public 
HugeType type() { + // NOTE: we optimize the edge type that let it include direction + return this.isOutEdge ? HugeType.EDGE_OUT : HugeType.EDGE_IN; + } + + @Override + public EdgeId id() { + return (EdgeId) this.id; + } + + @Override + public EdgeLabel schemaLabel() { + assert this.graph().sameAs(this.label.graph()); + return this.label; + } + + @Override + public String name() { + if (this.name == null) { + List sortValues = this.sortValues(); + if (sortValues.isEmpty()) { + this.name = Strings.EMPTY; + } else { + this.name = ConditionQuery.concatValues(sortValues); + } + } + return this.name; + } + + public void name(String name) { + this.name = name; + } + + public boolean selfLoop() { + return this.sourceVertex != null && + this.sourceVertex == this.targetVertex; + } + + public Directions direction() { + return this.isOutEdge ? Directions.OUT : Directions.IN; + } + + public boolean matchDirection(Directions direction) { + if (direction == Directions.BOTH || this.selfLoop()) { + return true; + } + return this.isDirection(direction); + } + + public boolean isDirection(Directions direction) { + return this.isOutEdge && direction == Directions.OUT || + !this.isOutEdge && direction == Directions.IN; + } + + @Watched(prefix = "edge") + public void assignId() { + // Generate an id and assign + this.id = new EdgeId(this.ownerVertex(), this.direction(), + this.schemaLabel().id(), this.name(), + this.otherVertex()); + + if (this.fresh()) { + int len = this.id.length(); + E.checkArgument(len <= BytesBuffer.BIG_ID_LEN_MAX, + "The max length of edge id is %s, but got %s {%s}", + BytesBuffer.BIG_ID_LEN_MAX, len, this.id); + } + } + + @Watched(prefix = "edge") + public EdgeId idWithDirection() { + return ((EdgeId) this.id).directed(true); + } + + @Watched(prefix = "edge") + protected List sortValues() { + List sortKeys = this.schemaLabel().sortKeys(); + if (sortKeys.isEmpty()) { + return ImmutableList.of(); + } + List propValues = new ArrayList<>(sortKeys.size()); + for (Id 
sk : sortKeys) { + HugeProperty property = this.getProperty(sk); + E.checkState(property != null, + "The value of sort key '%s' can't be null", sk); + Object propValue = property.serialValue(true); + if (Strings.EMPTY.equals(propValue)) { + propValue = ConditionQuery.INDEX_VALUE_EMPTY; + } + propValues.add(propValue); + } + return propValues; + } + + @Override + public Property property(String key, V value) { + PropertyKey propertyKey = this.graph().propertyKey(key); + // Check key in edge label + E.checkArgument(this.label.properties().contains(propertyKey.id()), + "Invalid property '%s' for edge label '%s'", + key, this.label()); + // Sort-Keys can only be set once + if (this.schemaLabel().sortKeys().contains(propertyKey.id())) { + E.checkArgument(!this.hasProperty(propertyKey.id()), + "Can't update sort key: '%s'", key); + } + return this.addProperty(propertyKey, value, !this.fresh()); + } + + @Watched(prefix = "edge") + @Override + protected HugeEdgeProperty newProperty(PropertyKey pkey, V val) { + return new HugeEdgeProperty<>(this, pkey, val); + } + + @Watched(prefix = "edge") + @Override + protected boolean ensureFilledProperties(boolean throwIfNotExist) { + if (this.isPropLoaded()) { + this.updateToDefaultValueIfNone(); + return true; + } + + // Skip query if there is no any property key in schema + if (this.schemaLabel().properties().isEmpty()) { + this.propLoaded(); + return true; + } + + // Seems there is no scene to be here + Iterator edges = this.graph().edges(this.id()); + Edge edge = QueryResults.one(edges); + if (edge == null && !throwIfNotExist) { + return false; + } + E.checkState(edge != null, "Edge '%s' does not exist", this.id); + this.copyProperties((HugeEdge) edge); + this.updateToDefaultValueIfNone(); + return true; + } + + @Watched(prefix = "edge") + @SuppressWarnings("unchecked") // (Property) prop + @Override + public Iterator> properties(String... 
keys) { + this.ensureFilledProperties(true); + + // Capacity should be about the following size + int propsCapacity = keys.length == 0 ? + this.sizeOfProperties() : + keys.length; + List> props = new ArrayList<>(propsCapacity); + + if (keys.length == 0) { + for (HugeProperty prop : this.getProperties()) { + assert prop instanceof Property; + props.add((Property) prop); + } + } else { + for (String key : keys) { + Id pkeyId; + try { + pkeyId = this.graph().propertyKey(key).id(); + } catch (IllegalArgumentException ignored) { + continue; + } + HugeProperty prop = this.getProperty(pkeyId); + if (prop == null) { + // Not found + continue; + } + assert prop instanceof Property; + props.add((Property) prop); + } + } + return props.iterator(); + } + + @Override + public Object sysprop(HugeKeys key) { + switch (key) { + case ID: + return this.id(); + case OWNER_VERTEX: + return this.ownerVertex().id(); + case LABEL: + return this.schemaLabel().id(); + case DIRECTION: + return this.direction(); + case OTHER_VERTEX: + return this.otherVertex().id(); + case SORT_VALUES: + return this.name(); + case PROPERTIES: + return this.getPropertiesMap(); + default: + E.checkArgument(false, + "Invalid system property '%s' of Edge", key); + return null; + } + } + + @Override + public Iterator vertices(Direction direction) { + List vertices = new ArrayList<>(2); + switch (direction) { + case OUT: + vertices.add(this.sourceVertex()); + break; + case IN: + vertices.add(this.targetVertex()); + break; + case BOTH: + vertices.add(this.sourceVertex()); + vertices.add(this.targetVertex()); + break; + default: + throw new AssertionError("Unsupported direction: " + direction); + } + + return vertices.iterator(); + } + + @Override + public Vertex outVertex() { + return this.sourceVertex(); + } + + @Override + public Vertex inVertex() { + return this.targetVertex(); + } + + public void vertices(HugeVertex owner, HugeVertex other) { + Id ownerLabel = owner.schemaLabel().id(); + if 
(ownerLabel.equals(this.label.sourceLabel())) { + this.vertices(true, owner, other); + } else { + ownerLabel.equals(this.label.targetLabel()); + this.vertices(false, owner, other); + } + } + + public void vertices(boolean outEdge, HugeVertex owner, HugeVertex other) { + this.isOutEdge = outEdge; + if (this.isOutEdge) { + this.sourceVertex = owner; + this.targetVertex = other; + } else { + this.sourceVertex = other; + this.targetVertex = owner; + } + } + + @Watched + public HugeEdge switchOwner() { + HugeEdge edge = this.clone(); + edge.isOutEdge = !edge.isOutEdge; + edge.id = ((EdgeId) edge.id).switchDirection(); + return edge; + } + + public HugeEdge switchToOutDirection() { + if (this.direction() == Directions.IN) { + return this.switchOwner(); + } + return this; + } + + public HugeVertex ownerVertex() { + return this.isOutEdge ? this.sourceVertex() : this.targetVertex(); + } + + public HugeVertex sourceVertex() { + this.checkAdjacentVertexExist(this.sourceVertex); + return this.sourceVertex; + } + + public void sourceVertex(HugeVertex sourceVertex) { + this.sourceVertex = sourceVertex; + } + + public HugeVertex targetVertex() { + this.checkAdjacentVertexExist(this.targetVertex); + return this.targetVertex; + } + + public void targetVertex(HugeVertex targetVertex) { + this.targetVertex = targetVertex; + } + + private void checkAdjacentVertexExist(HugeVertex vertex) { + if (vertex.schemaLabel().undefined() && + this.graph().checkAdjacentVertexExist()) { + throw new HugeException("Vertex '%s' does not exist", vertex.id()); + } + } + + public boolean belongToLabels(String... 
edgeLabels) { + if (edgeLabels.length == 0) { + return true; + } + + // Does edgeLabels contain me + for (String label : edgeLabels) { + if (label.equals(this.label())) { + return true; + } + } + return false; + } + + public boolean belongToVertex(HugeVertex vertex) { + return vertex != null && (vertex.equals(this.sourceVertex) || + vertex.equals(this.targetVertex)); + } + + public HugeVertex otherVertex(HugeVertex vertex) { + if (vertex == this.sourceVertex()) { + return this.targetVertex(); + } else { + E.checkArgument(vertex == this.targetVertex(), + "Invalid argument vertex '%s', must be in [%s, %s]", + vertex, this.sourceVertex(), this.targetVertex()); + return this.sourceVertex(); + } + } + + public HugeVertex otherVertex() { + return this.isOutEdge ? this.targetVertex() : this.sourceVertex(); + } + + /** + * Clear properties of the edge, and set `removed` true + * @return a new edge + */ + public HugeEdge prepareRemoved() { + HugeEdge edge = this.clone(); + edge.removed(true); + edge.resetProperties(); + return edge; + } + + @Override + public HugeEdge copy() { + HugeEdge edge = this.clone(); + edge.copyProperties(this); + return edge; + } + + @Override + protected HugeEdge clone() { + try { + return (HugeEdge) super.clone(); + } catch (CloneNotSupportedException e) { + throw new HugeException("Failed to clone HugeEdge", e); + } + } + + @Override + public String toString() { + return StringFactory.edgeString(this); + } + + public static final EdgeId getIdValue(Object idValue, + boolean returnNullIfError) { + Id id = HugeElement.getIdValue(idValue); + if (id == null || id instanceof EdgeId) { + return (EdgeId) id; + } + return EdgeId.parse(id.asString(), returnNullIfError); + } + + @Watched + public static HugeEdge constructEdge(HugeVertex ownerVertex, + boolean isOutEdge, + EdgeLabel edgeLabel, + String sortValues, + Id otherVertexId) { + HugeGraph graph = ownerVertex.graph(); + VertexLabel srcLabel = graph.vertexLabelOrNone(edgeLabel.sourceLabel()); + 
VertexLabel tgtLabel = graph.vertexLabelOrNone(edgeLabel.targetLabel()); + + VertexLabel otherVertexLabel; + if (isOutEdge) { + ownerVertex.correctVertexLabel(srcLabel); + otherVertexLabel = tgtLabel; + } else { + ownerVertex.correctVertexLabel(tgtLabel); + otherVertexLabel = srcLabel; + } + HugeVertex otherVertex = new HugeVertex(graph, otherVertexId, + otherVertexLabel); + + ownerVertex.propNotLoaded(); + otherVertex.propNotLoaded(); + + HugeEdge edge = new HugeEdge(graph, null, edgeLabel); + edge.name(sortValues); + edge.vertices(isOutEdge, ownerVertex, otherVertex); + edge.assignId(); + + if (isOutEdge) { + ownerVertex.addOutEdge(edge); + otherVertex.addInEdge(edge.switchOwner()); + } else { + ownerVertex.addInEdge(edge); + otherVertex.addOutEdge(edge.switchOwner()); + } + + return edge; + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java new file mode 100644 index 000000000..723cbc2d2 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java @@ -0,0 +1,410 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.serializer.direct.struct; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.function.Supplier; + +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; +import org.eclipse.collections.api.iterator.IntIterator; +import org.eclipse.collections.api.map.primitive.MutableIntObjectMap; + +import com.baidu.hugegraph.backend.id.EdgeId; +import com.baidu.hugegraph.backend.id.IdGenerator; +import com.baidu.hugegraph.perf.PerfUtil.Watched; +import com.baidu.hugegraph.schema.PropertyKey; +import com.baidu.hugegraph.schema.SchemaLabel; +import com.baidu.hugegraph.serializer.direct.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.structure.Element; +import com.baidu.hugegraph.type.HugeType; +import com.baidu.hugegraph.type.Idfiable; +import com.baidu.hugegraph.type.define.Cardinality; +import com.baidu.hugegraph.util.CollectionUtil; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.InsertionOrderUtil; +import com.baidu.hugegraph.util.collection.CollectionFactory; + +public abstract class HugeElement implements Element, GraphType, Idfiable { + + private static final MutableIntObjectMap> EMPTY_MAP = + CollectionFactory.newIntObjectMap(); + private static final int MAX_PROPERTIES = BytesBuffer.UINT16_MAX; + + private MutableIntObjectMap> properties; + + String label; + + public HugeElement() { + this.properties = EMPTY_MAP; + } + + public String label() { + return this.label; + } + + public abstract SchemaLabel schemaLabel(); + + protected abstract HugeProperty newProperty(PropertyKey pk, V val); + + protected 
abstract void onUpdateProperty(Cardinality cardinality, + HugeProperty prop); + + protected abstract boolean ensureFilledProperties(boolean throwIfNotExist); + + public Set getPropertyKeys() { + Set propKeys = InsertionOrderUtil.newSet(); + IntIterator keys = this.properties.keysView().intIterator(); + while (keys.hasNext()) { + propKeys.add(IdGenerator.of(keys.next())); + } + return propKeys; + } + + public Collection> getProperties() { + return this.properties.values(); + } + + public Collection> getFilledProperties() { + this.ensureFilledProperties(true); + return this.getProperties(); + } + + public Map getPropertiesMap() { + Map props = InsertionOrderUtil.newMap(); + for (HugeProperty prop : this.properties.values()) { + props.put(prop.propertyKey().id(), prop.value()); + } + // TODO: return MutableIntObjectMap for this method? + return props; + } + + public Collection> getAggregateProperties() { + List> aggrProps = InsertionOrderUtil.newList(); + for (HugeProperty prop : this.properties.values()) { + if (prop.type().isAggregateProperty()) { + aggrProps.add(prop); + } + } + return aggrProps; + } + + @SuppressWarnings("unchecked") + public HugeProperty getProperty(Id key) { + return (HugeProperty) this.properties.get(intFromId(key)); + } + + @SuppressWarnings("unchecked") + public V getPropertyValue(Id key) { + HugeProperty prop = this.properties.get(intFromId(key)); + if (prop == null) { + return null; + } + return (V) prop.value(); + } + + public boolean hasProperty(Id key) { + return this.properties.containsKey(intFromId(key)); + } + + public boolean hasProperties() { + return this.properties.size() > 0; + } + + public int sizeOfProperties() { + return this.properties.size(); + } + + public int sizeOfSubProperties() { + int size = 0; + for (HugeProperty p : this.properties.values()) { + size++; + if (p.propertyKey().cardinality() != Cardinality.SINGLE && + p.value() instanceof Collection) { + size += ((Collection) p.value()).size(); + } + } + return size; + 
} + + @Watched(prefix = "element") + public HugeProperty setProperty(HugeProperty prop) { + if (this.properties == EMPTY_MAP) { + this.properties = CollectionFactory.newIntObjectMap(); + } + PropertyKey pkey = prop.propertyKey(); + + E.checkArgument(this.properties.containsKey(intFromId(pkey.id())) || + this.properties.size() < MAX_PROPERTIES, + "Exceeded the maximum number of properties"); + return this.properties.put(intFromId(pkey.id()), prop); + } + + public HugeProperty removeProperty(Id key) { + return this.properties.remove(intFromId(key)); + } + + public HugeProperty addProperty(PropertyKey pkey, V value) { + return this.addProperty(pkey, value, false); + } + + @Watched(prefix = "element") + public HugeProperty addProperty(PropertyKey pkey, V value, + boolean notify) { + HugeProperty prop = null; + switch (pkey.cardinality()) { + case SINGLE: + prop = this.newProperty(pkey, value); + if (notify) { + /* + * NOTE: this method should be called before setProperty() + * because tx need to delete index without the new property + */ + this.onUpdateProperty(pkey.cardinality(), prop); + } + this.setProperty(prop); + break; + case SET: + prop = this.addProperty(pkey, value, HashSet::new); + if (notify) { + this.onUpdateProperty(pkey.cardinality(), prop); + } + break; + case LIST: + prop = this.addProperty(pkey, value, ArrayList::new); + if (notify) { + this.onUpdateProperty(pkey.cardinality(), prop); + } + break; + default: + assert false; + break; + } + return prop; + } + + @Watched(prefix = "element") + @SuppressWarnings({ "rawtypes", "unchecked" }) + private HugeProperty addProperty(PropertyKey pkey, V value, + Supplier> supplier) { + assert pkey.cardinality().multiple(); + HugeProperty> property; + if (this.hasProperty(pkey.id())) { + property = this.getProperty(pkey.id()); + } else { + property = this.newProperty(pkey, supplier.get()); + this.setProperty(property); + } + + Collection values; + if (pkey.cardinality() == Cardinality.SET) { + if (value instanceof 
Set) { + values = (Set) value; + } else { + values = CollectionUtil.toSet(value); + } + } else { + assert pkey.cardinality() == Cardinality.LIST; + if (value instanceof List) { + values = (List) value; + } else { + values = CollectionUtil.toList(value); + } + } + property.value().addAll(pkey.validValueOrThrow(values)); + + // Any better ways? + return property; + } + + public void resetProperties() { + this.properties = CollectionFactory.newIntObjectMap(); + this.propLoaded = false; + } + + protected void copyProperties(HugeElement element) { + if (element.properties == EMPTY_MAP) { + this.properties = EMPTY_MAP; + } else { + this.properties = CollectionFactory.newIntObjectMap( + element.properties); + } + this.propLoaded = true; + } + + public HugeElement copyAsFresh() { + HugeElement elem = this.copy(); + elem.fresh = true; + return elem; + } + + public abstract HugeElement copy(); + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Element)) { + return false; + } + + Element other = (Element) obj; + if (this.id() == null) { + return false; + } + + return this.id().equals(other.id()); + } + + /** + * Classify parameter list(pairs) from call request + * @param keyValues The property key-value pair of the vertex or edge + * @return Key-value pairs that are classified and processed + */ + @Watched(prefix = "element") + public static final ElementKeys classifyKeys(Object... 
keyValues) { + ElementKeys elemKeys = new ElementKeys(); + + if ((keyValues.length & 1) == 1) { + throw Element.Exceptions.providedKeyValuesMustBeAMultipleOfTwo(); + } + for (int i = 0; i < keyValues.length; i = i + 2) { + Object key = keyValues[i]; + Object val = keyValues[i + 1]; + + if (!(key instanceof String) && !(key instanceof T)) { + throw Element.Exceptions + .providedKeyValuesMustHaveALegalKeyOnEvenIndices(); + } + if (val == null) { + throw Property.Exceptions.propertyDoesNotExist(); + } + + if (key.equals(T.id)) { + elemKeys.id = val; + } else if (key.equals(T.label)) { + elemKeys.label = val; + } else { + elemKeys.keys.add(key.toString()); + } + } + return elemKeys; + } + + public static final Id getIdValue(HugeType type, Object idValue) { + assert type.isGraph(); + Id id = getIdValue(idValue); + if (type.isVertex()) { + return id; + } else { + if (id == null || id instanceof EdgeId) { + return id; + } + return EdgeId.parse(id.asString()); + } + } + + @Watched(prefix = "element") + protected static Id getIdValue(Object idValue) { + if (idValue == null) { + return null; + } + + if (idValue instanceof String) { + // String id + return IdGenerator.of((String) idValue); + } else if (idValue instanceof Number) { + // Long id + return IdGenerator.of(((Number) idValue).longValue()); + } else if (idValue instanceof UUID) { + // UUID id + return IdGenerator.of((UUID) idValue); + } else if (idValue instanceof Id) { + // Id itself + return (Id) idValue; + } else if (idValue instanceof Element) { + // Element + return (Id) ((Element) idValue).id(); + } + + // Throw if error type + throw new UnsupportedOperationException(String.format( + "Invalid element id: %s(%s)", + idValue, idValue.getClass().getSimpleName())); + } + + @Watched(prefix = "element") + public static final Object getLabelValue(Object... 
keyValues) { + Object labelValue = null; + for (int i = 0; i < keyValues.length; i = i + 2) { + if (keyValues[i].equals(T.label)) { + labelValue = keyValues[i + 1]; + if (labelValue instanceof String) { + ElementHelper.validateLabel((String) labelValue); + } + break; + } + } + return labelValue; + } + + public static int intFromId(Id id) { + E.checkArgument(id instanceof IdGenerator.LongId, + "Can't get number from %s(%s)", id, id.getClass()); + return ((IdGenerator.LongId) id).intValue(); + } + + public static final class ElementKeys { + + private Object label = null; + private Object id = null; + private Set keys = new HashSet<>(); + + public Object label() { + return this.label; + } + + public void label(Object label) { + this.label = label; + } + + public Object id() { + return this.id; + } + + public void id(Object id) { + this.id = id; + } + + public Set keys() { + return this.keys; + } + + public void keys(Set keys) { + this.keys = keys; + } + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java new file mode 100644 index 000000000..02d02a51f --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java @@ -0,0 +1,386 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.serializer.direct.struct; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.apache.logging.log4j.util.Strings; +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.glassfish.jersey.internal.guava.Sets; + +import com.baidu.hugegraph.backend.id.IdGenerator; +import com.baidu.hugegraph.backend.query.ConditionQuery; +import com.baidu.hugegraph.backend.query.QueryResults; +import com.baidu.hugegraph.config.CoreOptions; +import com.baidu.hugegraph.perf.PerfUtil.Watched; +import com.baidu.hugegraph.schema.PropertyKey; +import com.baidu.hugegraph.schema.VertexLabel; +import com.baidu.hugegraph.serializer.direct.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.serializer.direct.util.SplicingIdGenerator; +import com.baidu.hugegraph.serializer.direct.util.HugeException; +import com.baidu.hugegraph.type.define.Cardinality; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.type.define.IdStrategy; +import com.baidu.hugegraph.util.E; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +/** + * We need a simple vertex 
struct for direct encode + * 1. id (only support number + string type) & treat as bytes + * 2. property, save a map + * + * So how could we deal with multi situation? (we judge before construct it), like: + * 1. idStrategy should be set / check before load + * 2. edges should do only in HugeEdge + * 3. any transaction actions should be ignored + **/ +public class HugeVertex extends HugeElement implements Vertex, Cloneable { + + private static final List EMPTY_LIST = ImmutableList.of(); + + private Id id; + protected Collection edges; + + public HugeVertex(Id id, String label) { + + E.checkArgumentNotNull(label, "Vertex label can't be null"); + this.label = label; + + this.id = id; + this.edges = EMPTY_LIST; + if (this.id != null) { + this.checkIdLength(); + } + } + + + @Override + public Id id() { + return this.id; + } + + @Watched(prefix = "vertex") + public void assignId(Id id) { + IdStrategy strategy = this.label.idStrategy(); + // Generate an id and assign + switch (strategy) { + case CUSTOMIZE_STRING: + assert !id.number(); + this.id = id; + break; + case CUSTOMIZE_NUMBER: + assert id.number(); + this.id = id; + break; + case CUSTOMIZE_UUID: + this.id = id.uuid() ? 
id : IdGenerator.of(id.asString(), true); + break; + case PRIMARY_KEY: + this.id = SplicingIdGenerator.instance().generate(this); + break; + default: + throw new HugeException("Unknown id strategy" + strategy); + } + this.checkIdLength(); + } + + protected void checkIdLength() { + assert this.id != null; + int len = this.id.asBytes().length; + if (len > BytesBuffer.ID_LEN_MAX) { + throw new HugeException("The max length of vertex id is 128, but got" + len); + } + + } + + public void correctVertexLabel(VertexLabel correctLabel) { + E.checkArgumentNotNull(correctLabel, "Vertex label can't be null"); + if (this.label != null && !this.label.undefined() && + !correctLabel.undefined()) { + E.checkArgument(this.label.equals(correctLabel), + "Vertex label can't be changed from '%s' to '%s'", + this.label, correctLabel); + } + this.label = correctLabel; + } + + @Watched(prefix = "vertex") + protected List primaryValues() { + E.checkArgument(this.label.idStrategy() == IdStrategy.PRIMARY_KEY, + "The id strategy '%s' don't have primary keys", + this.label.idStrategy()); + List primaryKeys = this.label.primaryKeys(); + E.checkArgument(!primaryKeys.isEmpty(), + "Primary key can't be empty for id strategy '%s'", + IdStrategy.PRIMARY_KEY); + + boolean encodeNumber = this.graph() + .option(CoreOptions.VERTEX_ENCODE_PK_NUMBER); + List propValues = new ArrayList<>(primaryKeys.size()); + for (Id pk : primaryKeys) { + HugeProperty property = this.getProperty(pk); + E.checkState(property != null, + "The value of primary key '%s' can't be null", + this.graph().propertyKey(pk).name()); + Object propValue = property.serialValue(encodeNumber); + if (Strings.EMPTY.equals(propValue)) { + propValue = ConditionQuery.INDEX_VALUE_EMPTY; + } + propValues.add(propValue); + } + return propValues; + } + + public boolean existsEdges() { + return this.edges.size() > 0; + } + + public Collection getEdges() { + return Collections.unmodifiableCollection(this.edges); + } + + public void addEdge(HugeEdge 
edge) { + if (this.edges == EMPTY_LIST) { + this.edges = newList(); + } + this.edges.add(edge); + } + + /** + * Add edge with direction OUT + * @param edge the out edge + */ + @Watched + public void addOutEdge(HugeEdge edge) { + if (edge.ownerVertex() == null) { + edge.sourceVertex(this); + } + E.checkState(edge.isDirection(Directions.OUT), + "The owner vertex('%s') of OUT edge '%s' should be '%s'", + edge.ownerVertex().id(), edge, this.id()); + this.addEdge(edge); + } + + /** + * Add edge with direction IN + * @param edge the in edge + */ + @Watched + public void addInEdge(HugeEdge edge) { + if (edge.ownerVertex() == null) { + edge.targetVertex(this); + } + E.checkState(edge.isDirection(Directions.IN), + "The owner vertex('%s') of IN edge '%s' should be '%s'", + edge.ownerVertex().id(), edge, this.id()); + this.addEdge(edge); + } + + public Iterator getEdges(Directions direction, String... edgeLabels) { + List list = new LinkedList<>(); + for (HugeEdge edge : this.edges) { + if (edge.matchDirection(direction) && + edge.belongToLabels(edgeLabels)) { + list.add(edge); + } + } + return list.iterator(); + } + + public Iterator getVertices(Directions direction, + String... edgeLabels) { + List list = new LinkedList<>(); + Iterator edges = this.getEdges(direction, edgeLabels); + while (edges.hasNext()) { + HugeEdge edge = (HugeEdge) edges.next(); + list.add(edge.otherVertex(this)); + } + return list.iterator(); + } + + @Watched(prefix = "vertex") + @Override + public Iterator vertices(Direction direction, + String... edgeLabels) { + Iterator edges = this.edges(direction, edgeLabels); + return this.graph().adjacentVertices(edges); + } + + @Watched(prefix = "vertex") + @Override + public VertexProperty property( + VertexProperty.Cardinality cardinality, + String key, V value, Object... 
objects) { + if (objects.length != 0 && objects[0].equals(T.id)) { + throw VertexProperty.Exceptions.userSuppliedIdsNotSupported(); + } + // TODO: extra props: objects + if (objects.length != 0) { + throw VertexProperty.Exceptions.metaPropertiesNotSupported(); + } + + PropertyKey propertyKey = this.graph().propertyKey(key); + /* + * g.AddV("xxx").property("key1", val1).property("key2", val2) + * g.AddV("xxx").property(single, "key1", val1) + * .property(list, "key2", val2) + * + * The cardinality single may be user supplied single, it may also be + * that user doesn't supplied cardinality, when it is latter situation, + * we shouldn't check it. Because of this reason, we are forced to + * give up the check of user supplied cardinality single. + * The cardinality not single must be user supplied, so should check it + */ + if (cardinality != VertexProperty.Cardinality.single) { + E.checkArgument(propertyKey.cardinality() == + Cardinality.convert(cardinality), + "Invalid cardinality '%s' for property key '%s', " + + "expect '%s'", cardinality, key, + propertyKey.cardinality().string()); + } + + // Check key in vertex label + E.checkArgument(VertexLabel.OLAP_VL.equals(this.label) || + this.label.properties().contains(propertyKey.id()), + "Invalid property '%s' for vertex label '%s'", + key, this.label); + // Primary-Keys can only be set once + if (this.schemaLabel().primaryKeys().contains(propertyKey.id())) { + E.checkArgument(!this.hasProperty(propertyKey.id()), + "Can't update primary key: '%s'", key); + } + + @SuppressWarnings("unchecked") + VertexProperty prop = (VertexProperty) this.addProperty( + propertyKey, value, !this.fresh()); + return prop; + } + + @Watched(prefix = "vertex") + @Override + protected HugeVertexProperty newProperty(PropertyKey pkey, V val) { + return new HugeVertexProperty<>(this, pkey, val); + } + + @Watched(prefix = "vertex") + @Override + protected boolean ensureFilledProperties(boolean throwIfNotExist) { + if (this.isPropLoaded()) { + 
this.updateToDefaultValueIfNone(); + return true; + } + + // Skip query if there is no any property key in schema + if (this.schemaLabel().properties().isEmpty()) { + this.propLoaded(); + return true; + } + + // NOTE: only adjacent vertex will reach here + Iterator vertices = this.graph().adjacentVertex(this.id()); + HugeVertex vertex = (HugeVertex) QueryResults.one(vertices); + if (vertex == null && !throwIfNotExist) { + return false; + } + E.checkState(vertex != null, "Vertex '%s' does not exist", this.id); + if (vertex.schemaLabel().undefined() || + !vertex.schemaLabel().equals(this.schemaLabel())) { + // Update vertex label of dangling edge to undefined + this.correctVertexLabel(VertexLabel.undefined(this.graph())); + vertex.resetProperties(); + } + this.copyProperties(vertex); + this.updateToDefaultValueIfNone(); + return true; + } + + @Watched(prefix = "vertex") + @SuppressWarnings("unchecked") // (VertexProperty) prop + @Override + public Iterator> properties(String... keys) { + // TODO: Compatible with TinkerPop properties() (HugeGraph-742) + this.ensureFilledProperties(true); + + // Capacity should be about the following size + int propsCapacity = keys.length == 0 ? 
+ this.sizeOfProperties() : + keys.length; + List> props = new ArrayList<>(propsCapacity); + + if (keys.length == 0) { + for (HugeProperty prop : this.getProperties()) { + assert prop instanceof VertexProperty; + props.add((VertexProperty) prop); + } + } else { + for (String key : keys) { + Id pkeyId; + try { + pkeyId = this.graph().propertyKey(key).id(); + } catch (IllegalArgumentException ignored) { + continue; + } + HugeProperty prop = this.getProperty(pkeyId); + if (prop == null) { + // Not found + continue; + } + assert prop instanceof VertexProperty; + props.add((VertexProperty) prop); + } + } + + return props.iterator(); + } + + public boolean valid() { + try { + return this.ensureFilledProperties(false); + } catch (Throwable e) { + // Generally the program can't get here + return false; + } + } + + public static final Id getIdValue(Object idValue) { + return HugeElement.getIdValue(idValue); + } + + // we don't use origin sets now + private static Set newSet() { + return Sets.newHashSet(); + } + + private static List newList() { + return Lists.newArrayList(); + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java new file mode 100644 index 000000000..b388eb9f0 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java @@ -0,0 +1,1294 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.serializer.direct.backup; + +import static com.baidu.hugegraph.serializer.direct.util.Id.*; +import static com.baidu.hugegraph.serializer.direct.util.Id.IdType.*; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang.NotImplementedException; + +import com.baidu.hugegraph.HugeGraph; +import com.baidu.hugegraph.backend.BackendException; +import com.baidu.hugegraph.backend.id.EdgeId; +import com.baidu.hugegraph.backend.id.Id; +import com.baidu.hugegraph.backend.id.IdGenerator; +import com.baidu.hugegraph.backend.page.PageState; +import com.baidu.hugegraph.backend.query.Condition; +import com.baidu.hugegraph.backend.query.Condition.RangeConditions; +import com.baidu.hugegraph.backend.query.ConditionQuery; +import com.baidu.hugegraph.backend.query.IdPrefixQuery; +import com.baidu.hugegraph.backend.query.IdRangeQuery; +import com.baidu.hugegraph.backend.query.Query; +import com.baidu.hugegraph.backend.serializer.BinaryBackendEntry.BinaryId; +import com.baidu.hugegraph.backend.store.BackendEntry; +import com.baidu.hugegraph.backend.store.BackendEntry.BackendColumn; +import com.baidu.hugegraph.config.HugeConfig; +import com.baidu.hugegraph.schema.EdgeLabel; +import com.baidu.hugegraph.schema.IndexLabel; +import com.baidu.hugegraph.schema.PropertyKey; +import com.baidu.hugegraph.schema.SchemaElement; +import com.baidu.hugegraph.schema.VertexLabel; +import 
com.baidu.hugegraph.serializer.direct.BinaryBackendEntry; +import com.baidu.hugegraph.serializer.direct.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.DataType; +import com.baidu.hugegraph.serializer.direct.HugeType; +import com.baidu.hugegraph.serializer.direct.struct.HugeEdge; +import com.baidu.hugegraph.serializer.direct.struct.HugeElement; +import com.baidu.hugegraph.serializer.direct.struct.HugeVertex; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.serializer.direct.util.StringEncoding; +import com.baidu.hugegraph.structure.HugeEdge; +import com.baidu.hugegraph.structure.HugeEdgeProperty; +import com.baidu.hugegraph.structure.HugeElement; +import com.baidu.hugegraph.structure.HugeIndex; +import com.baidu.hugegraph.structure.HugeProperty; +import com.baidu.hugegraph.structure.HugeVertex; +import com.baidu.hugegraph.structure.HugeVertexProperty; +import com.baidu.hugegraph.type.HugeType; +import com.baidu.hugegraph.type.define.AggregateType; +import com.baidu.hugegraph.type.define.Cardinality; +import com.baidu.hugegraph.type.define.Directions; +import com.baidu.hugegraph.type.define.Frequency; +import com.baidu.hugegraph.type.define.HugeKeys; +import com.baidu.hugegraph.type.define.IdStrategy; +import com.baidu.hugegraph.type.define.IndexType; +import com.baidu.hugegraph.type.define.SchemaStatus; +import com.baidu.hugegraph.type.define.WriteType; +import com.baidu.hugegraph.util.Bytes; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.JsonUtil; +import com.baidu.hugegraph.util.NumericUtil; + +import sun.plugin2.message.AbstractSerializer; + +public class OldSerializer extends AbstractSerializer { + + /* + * Id is stored in column name if keyWithIdPrefix=true like RocksDB, + * else stored in rowkey like HBase. 
+ */ + private final boolean keyWithIdPrefix; + private final boolean indexWithIdPrefix; + private final boolean enablePartition; + + public OldSerializer() { + this(true, true, false); + } + + public OldSerializer(HugeConfig config) { + this(true, true, false); + } + + public OldSerializer(boolean keyWithIdPrefix, + boolean indexWithIdPrefix, + boolean enablePartition) { + this.keyWithIdPrefix = keyWithIdPrefix; + this.indexWithIdPrefix = indexWithIdPrefix; + this.enablePartition = enablePartition; + } + + @Override + protected BinaryBackendEntry newBackendEntry(HugeType type, Id id) { + if (type.isVertex()) { + BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + id.length()); + writePartitionedId(HugeType.VERTEX, id, buffer); + return new BinaryBackendEntry(type, new BinaryId(buffer.bytes(), id)); + } + + if (type.isEdge()) { + E.checkState(id instanceof BinaryId, + "Expect a BinaryId for BackendEntry with edge id"); + return new BinaryBackendEntry(type, (BinaryId) id); + } + + if (type.isIndex()) { + BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); + byte[] idBytes = buffer.writeIndexId(id, type).bytes(); + return new BinaryBackendEntry(type, new BinaryId(idBytes, id)); + } + + BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); + byte[] idBytes = buffer.writeId(id).bytes(); + return new BinaryBackendEntry(type, new BinaryId(idBytes, id)); + } + + protected final BinaryBackendEntry newBackendEntry(HugeVertex vertex) { + return newBackendEntry(vertex.type(), vertex.id()); + } + + protected final BinaryBackendEntry newBackendEntry(HugeEdge edge) { + BinaryId id = writeEdgeId(edge.idWithDirection()); + return newBackendEntry(edge.type(), id); + } + + protected final BinaryBackendEntry newBackendEntry(SchemaElement elem) { + return newBackendEntry(elem.type(), elem.id()); + } + + protected byte[] formatSyspropName(Id id, HugeKeys col) { + int idLen = this.keyWithIdPrefix ? 
1 + id.length() : 0; + BytesBuffer buffer = BytesBuffer.allocate(idLen + 1 + 1); + byte sysprop = HugeType.SYS_PROPERTY.code(); + if (this.keyWithIdPrefix) { + buffer.writeId(id); + } + return buffer.write(sysprop).write(col.code()).bytes(); + } + + protected byte[] formatSyspropName(BinaryId id, HugeKeys col) { + int idLen = this.keyWithIdPrefix ? id.length() : 0; + BytesBuffer buffer = BytesBuffer.allocate(idLen + 1 + 1); + byte sysprop = HugeType.SYS_PROPERTY.code(); + if (this.keyWithIdPrefix) { + buffer.write(id.asBytes()); + } + return buffer.write(sysprop).write(col.code()).bytes(); + } + + protected BackendColumn formatLabel(HugeElement elem) { + BackendColumn col = new BackendColumn(); + col.name = this.formatSyspropName(elem.id(), HugeKeys.LABEL); + Id label = elem.schemaLabel().id(); + BytesBuffer buffer = BytesBuffer.allocate(label.length() + 1); + col.value = buffer.writeId(label).bytes(); + return col; + } + + protected byte[] formatPropertyName(HugeProperty prop) { + Id id = prop.element().id(); + int idLen = this.keyWithIdPrefix ? 
1 + id.length() : 0; + Id pkeyId = prop.propertyKey().id(); + BytesBuffer buffer = BytesBuffer.allocate(idLen + 2 + pkeyId.length()); + if (this.keyWithIdPrefix) { + buffer.writeId(id); + } + buffer.write(prop.type().code()); + buffer.writeId(pkeyId); + return buffer.bytes(); + } + + protected BackendColumn formatProperty(HugeProperty prop) { + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_PROPERTY); + buffer.writeProperty(prop.propertyKey(), prop.value()); + return BackendColumn.of(this.formatPropertyName(prop), buffer.bytes()); + } + + protected void parseProperty(Id pkeyId, BytesBuffer buffer, + HugeElement owner) { + PropertyKey pkey = owner.graph().propertyKey(pkeyId); + + // Parse value + Object value = buffer.readProperty(pkey); + + // Set properties of vertex/edge + if (pkey.cardinality() == Cardinality.SINGLE) { + owner.addProperty(pkey, value); + } else { + if (!(value instanceof Collection)) { + throw new BackendException( + "Invalid value of non-single property: %s", value); + } + owner.addProperty(pkey, value); + } + } + + protected void formatProperties(Collection> props, + BytesBuffer buffer) { + // Write properties size + buffer.writeVInt(props.size()); + + // Write properties data + for (HugeProperty property : props) { + PropertyKey pkey = property.propertyKey(); + buffer.writeVInt(SchemaElement.schemaId(pkey.id())); + buffer.writeProperty(pkey, property.value()); + } + } + + protected void parseProperties(BytesBuffer buffer, HugeElement owner) { + int size = buffer.readVInt(); + assert size >= 0; + for (int i = 0; i < size; i++) { + Id pkeyId = IdGenerator.of(buffer.readVInt()); + this.parseProperty(pkeyId, buffer, owner); + } + } + + protected void formatExpiredTime(long expiredTime, BytesBuffer buffer) { + buffer.writeVLong(expiredTime); + } + + protected void parseExpiredTime(BytesBuffer buffer, HugeElement element) { + element.expiredTime(buffer.readVLong()); + } + + protected byte[] formatEdgeValue(HugeEdge edge) { + int 
propsCount = edge.sizeOfProperties(); + BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount); + + // Write edge id + //buffer.writeId(edge.id()); + + // Write edge properties + this.formatProperties(edge.getProperties(), buffer); + + // Write edge expired time if needed + if (edge.hasTtl()) { + this.formatExpiredTime(edge.expiredTime(), buffer); + } + + return buffer.bytes(); + } + + protected void parseEdge(BackendColumn col, HugeVertex vertex, + HugeGraph graph) { + // owner-vertex + dir + edge-label + sort-values + other-vertex + + BytesBuffer buffer = BytesBuffer.wrap(col.name); + if (this.keyWithIdPrefix) { + // Consume owner-vertex id + buffer.readId(); + } + byte type = buffer.read(); + Id labelId = buffer.readId(); + String sortValues = buffer.readStringWithEnding(); + Id otherVertexId = buffer.readId(); + + boolean direction = EdgeId.isOutDirectionFromCode(type); + EdgeLabel edgeLabel = graph.edgeLabelOrNone(labelId); + + // Construct edge + HugeEdge edge = HugeEdge.constructEdge(vertex, direction, edgeLabel, + sortValues, otherVertexId); + + // Parse edge-id + edge-properties + buffer = BytesBuffer.wrap(col.value); + + //Id id = buffer.readId(); + + // Parse edge properties + this.parseProperties(buffer, edge); + + // Parse edge expired time if needed + if (edge.hasTtl()) { + this.parseExpiredTime(buffer, edge); + } + } + + protected void parseVertex(byte[] value, HugeVertex vertex) { + BytesBuffer buffer = BytesBuffer.wrap(value); + + // Parse vertex label + VertexLabel label = vertex.graph().vertexLabelOrNone(buffer.readId()); + vertex.correctVertexLabel(label); + + // Parse properties + this.parseProperties(buffer, vertex); + + // Parse vertex expired time if needed + if (vertex.hasTtl()) { + this.parseExpiredTime(buffer, vertex); + } + } + + protected void parseColumn(BackendColumn col, HugeVertex vertex) { + BytesBuffer buffer = BytesBuffer.wrap(col.name); + Id id = this.keyWithIdPrefix ? 
buffer.readId() : vertex.id(); + E.checkState(buffer.remaining() > 0, "Missing column type"); + byte type = buffer.read(); + // Parse property + if (type == HugeType.PROPERTY.code()) { + Id pkeyId = buffer.readId(); + this.parseProperty(pkeyId, BytesBuffer.wrap(col.value), vertex); + } + // Parse edge + else if (type == HugeType.EDGE_IN.code() || + type == HugeType.EDGE_OUT.code()) { + this.parseEdge(col, vertex, vertex.graph()); + } + // Parse system property + else if (type == HugeType.SYS_PROPERTY.code()) { + // pass + } + // Invalid entry + else { + E.checkState(false, "Invalid entry(%s) with unknown type(%s): 0x%s", + id, type & 0xff, Bytes.toHex(col.name)); + } + } + + protected byte[] formatIndexName(HugeIndex index) { + BytesBuffer buffer; + Id elemId = index.elementId(); + if (!this.indexWithIdPrefix) { + int idLen = 1 + elemId.length(); + buffer = BytesBuffer.allocate(idLen); + } else { + Id indexId = index.id(); + HugeType type = index.type(); + if (!type.isNumericIndex() && indexIdLengthExceedLimit(indexId)) { + indexId = index.hashId(); + } + int idLen = 1 + elemId.length() + 1 + indexId.length(); + buffer = BytesBuffer.allocate(idLen); + // Write index-id + buffer.writeIndexId(indexId, type); + } + // Write element-id + buffer.writeId(elemId); + // Write expired time if needed + if (index.hasTtl()) { + buffer.writeVLong(index.expiredTime()); + } + + return buffer.bytes(); + } + + protected void parseIndexName(HugeGraph graph, ConditionQuery query, + BinaryBackendEntry entry, + HugeIndex index, Object fieldValues) { + for (BackendColumn col : entry.columns()) { + if (indexFieldValuesUnmatched(col.value, fieldValues)) { + // Skip if field-values is not matched (just the same hash) + continue; + } + BytesBuffer buffer = BytesBuffer.wrap(col.name); + if (this.indexWithIdPrefix) { + buffer.readIndexId(index.type()); + } + Id elemId = buffer.readId(); + long expiredTime = index.hasTtl() ? 
buffer.readVLong() : 0L; + index.elementIds(elemId, expiredTime); + } + } + + @Override + public BackendEntry writeVertex(HugeVertex vertex) { + if (vertex.olap()) { + return this.writeOlapVertex(vertex); + } + + BinaryBackendEntry entry = newBackendEntry(vertex); + + if (vertex.removed()) { + return entry; + } + + int propsCount = vertex.sizeOfProperties(); + BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount); + + // Write vertex label + buffer.writeId(vertex.schemaLabel().id()); + + // Write all properties of the vertex + this.formatProperties(vertex.getProperties(), buffer); + + // Write vertex expired time if needed + if (vertex.hasTtl()) { + entry.ttl(vertex.ttl()); + this.formatExpiredTime(vertex.expiredTime(), buffer); + } + + // Fill column + byte[] name = this.keyWithIdPrefix ? + entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; + entry.column(name, buffer.bytes()); + + return entry; + } + + @Override + public BackendEntry writeOlapVertex(HugeVertex vertex) { + BinaryBackendEntry entry = newBackendEntry(HugeType.OLAP, vertex.id()); + BytesBuffer buffer = BytesBuffer.allocate(8 + 16); + + Collection> properties = vertex.getProperties(); + if (properties.size() != 1) { + E.checkArgument(false, + "Expect 1 property for olap vertex, but got %s", + properties.size()); + } + HugeProperty property = properties.iterator().next(); + PropertyKey propertyKey = property.propertyKey(); + buffer.writeVInt(SchemaElement.schemaId(propertyKey.id())); + buffer.writeProperty(propertyKey, property.value()); + + // Fill column + byte[] name = this.keyWithIdPrefix ? 
+ entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; + entry.column(name, buffer.bytes()); + entry.subId(propertyKey.id()); + entry.olap(true); + return entry; + } + + @Override + public BackendEntry writeVertexProperty(HugeVertexProperty prop) { + throw new NotImplementedException("Unsupported writeVertexProperty()"); + } + + @Override + public HugeVertex readVertex(HugeGraph graph, BackendEntry bytesEntry) { + if (bytesEntry == null) { + return null; + } + BinaryBackendEntry entry = this.convertEntry(bytesEntry); + + // Parse id + Id id = entry.id().origin(); + Id vid = id.edge() ? ((EdgeId) id).ownerVertexId() : id; + HugeVertex vertex = new HugeVertex(graph, vid, VertexLabel.NONE); + + // Parse all properties and edges of a Vertex + Iterator iterator = entry.columns().iterator(); + for (int index = 0; iterator.hasNext(); index++) { + BackendColumn col = iterator.next(); + if (entry.type().isEdge()) { + // NOTE: the entry id type is vertex even if entry type is edge + // Parse vertex edges + this.parseColumn(col, vertex); + } else { + assert entry.type().isVertex(); + // Parse vertex properties + assert entry.columnsSize() >= 1 : entry.columnsSize(); + if (index == 0) { + this.parseVertex(col.value, vertex); + } else { + this.parseVertexOlap(col.value, vertex); + } + } + } + + return vertex; + } + + protected void parseVertexOlap(byte[] value, HugeVertex vertex) { + BytesBuffer buffer = BytesBuffer.wrap(value); + Id pkeyId = IdGenerator.of(buffer.readVInt()); + this.parseProperty(pkeyId, buffer, vertex); + } + + @Override + public BackendEntry writeEdge(HugeEdge edge) { + BinaryBackendEntry entry = newBackendEntry(edge); + byte[] name = this.keyWithIdPrefix ? 
+ entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; + byte[] value = this.formatEdgeValue(edge); + entry.column(name, value); + + if (edge.hasTtl()) { + entry.ttl(edge.ttl()); + } + + return entry; + } + + @Override + public BackendEntry writeEdgeProperty(HugeEdgeProperty prop) { + // TODO: entry.column(this.formatProperty(prop)); + throw new NotImplementedException("Unsupported writeEdgeProperty()"); + } + + @Override + public HugeEdge readEdge(HugeGraph graph, BackendEntry bytesEntry) { + HugeVertex vertex = this.readVertex(graph, bytesEntry); + Collection edges = vertex.getEdges(); + if (edges.size() != 1) { + E.checkState(false, + "Expect 1 edge in vertex, but got %s", + edges.size()); + } + return edges.iterator().next(); + } + + @Override + public BackendEntry writeIndex(HugeIndex index) { + BinaryBackendEntry entry; + if (index.fieldValues() == null && index.elementIds().size() == 0) { + /* + * When field-values is null and elementIds size is 0, it is + * meaningful for deletion of index data by index label. 
+ * TODO: improve + */ + entry = this.formatILDeletion(index); + } else { + Id id = index.id(); + HugeType type = index.type(); + byte[] value = null; + if (!type.isNumericIndex() && indexIdLengthExceedLimit(id)) { + id = index.hashId(); + // Save field-values as column value if the key is a hash string + value = StringEncoding.encode(index.fieldValues().toString()); + } + + entry = newBackendEntry(type, id); + if (index.indexLabel().olap()) { + entry.olap(true); + } + entry.column(this.formatIndexName(index), value); + entry.subId(index.elementId()); + + if (index.hasTtl()) { + entry.ttl(index.ttl()); + } + } + return entry; + } + + @Override + public HugeIndex readIndex(HugeGraph graph, ConditionQuery query, + BackendEntry bytesEntry) { + if (bytesEntry == null) { + return null; + } + + BinaryBackendEntry entry = this.convertEntry(bytesEntry); + // NOTE: index id without length prefix + byte[] bytes = entry.id().asBytes(); + HugeIndex index = HugeIndex.parseIndexId(graph, entry.type(), bytes); + + Object fieldValues = null; + if (!index.type().isRangeIndex()) { + fieldValues = query.condition(HugeKeys.FIELD_VALUES); + if (!index.fieldValues().equals(fieldValues)) { + // Update field-values for hashed or encoded index-id + index.fieldValues(fieldValues); + } + } + + this.parseIndexName(graph, query, entry, index, fieldValues); + return index; + } + + @Override + public BackendEntry writeId(HugeType type, Id id) { + return newBackendEntry(type, id); + } + + @Override + protected Id writeQueryId(HugeType type, Id id) { + if (type.isEdge()) { + id = writeEdgeId(id); + } else if (type.isVertex()) { + BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + id.length()); + writePartitionedId(HugeType.VERTEX, id, buffer); + id = new BinaryId(buffer.bytes(), id); + } else { + BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); + id = new BinaryId(buffer.writeId(id).bytes(), id); + } + return id; + } + + @Override + protected Query writeQueryEdgeCondition(Query query) { 
+ ConditionQuery cq = (ConditionQuery) query; + if (cq.hasRangeCondition()) { + return this.writeQueryEdgeRangeCondition(cq); + } else { + return this.writeQueryEdgePrefixCondition(cq); + } + } + + private Query writeQueryEdgeRangeCondition(ConditionQuery cq) { + List sortValues = cq.syspropConditions(HugeKeys.SORT_VALUES); + E.checkArgument(sortValues.size() >= 1 && sortValues.size() <= 2, + "Edge range query must be with sort-values range"); + // Would ignore target vertex + Id vertex = cq.condition(HugeKeys.OWNER_VERTEX); + Directions direction = cq.condition(HugeKeys.DIRECTION); + if (direction == null) { + direction = Directions.OUT; + } + Id label = cq.condition(HugeKeys.LABEL); + + BytesBuffer start = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + writePartitionedId(HugeType.EDGE, vertex, start); + start.write(direction.type().code()); + start.writeId(label); + + BytesBuffer end = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + end.copyFrom(start); + + RangeConditions range = new RangeConditions(sortValues); + if (range.keyMin() != null) { + start.writeStringRaw((String) range.keyMin()); + } + if (range.keyMax() != null) { + end.writeStringRaw((String) range.keyMax()); + } + // Sort-value will be empty if there is no start sort-value + Id startId = new BinaryId(start.bytes(), null); + // Set endId as prefix if there is no end sort-value + Id endId = new BinaryId(end.bytes(), null); + + boolean includeStart = range.keyMinEq(); + if (cq.paging() && !cq.page().isEmpty()) { + includeStart = true; + byte[] position = PageState.fromString(cq.page()).position(); + E.checkArgument(Bytes.compare(position, startId.asBytes()) >= 0, + "Invalid page out of lower bound"); + startId = new BinaryId(position, null); + } + if (range.keyMax() == null) { + return new IdPrefixQuery(cq, startId, includeStart, endId); + } + return new IdRangeQuery(cq, startId, includeStart, endId, + range.keyMaxEq()); + } + + private Query writeQueryEdgePrefixCondition(ConditionQuery cq) { + 
int count = 0; + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + for (HugeKeys key : EdgeId.KEYS) { + Object value = cq.condition(key); + + if (value != null) { + count++; + } else { + if (key == HugeKeys.DIRECTION) { + // Direction is null, set to OUT + value = Directions.OUT; + } else { + break; + } + } + + if (key == HugeKeys.OWNER_VERTEX || + key == HugeKeys.OTHER_VERTEX) { + writePartitionedId(HugeType.EDGE, (Id) value, buffer); + } else if (key == HugeKeys.DIRECTION) { + byte t = ((Directions) value).type().code(); + buffer.write(t); + } else if (key == HugeKeys.LABEL) { + assert value instanceof Id; + buffer.writeId((Id) value); + } else if (key == HugeKeys.SORT_VALUES) { + assert value instanceof String; + buffer.writeStringWithEnding((String) value); + } else { + assert false : key; + } + } + + if (count > 0) { + assert count == cq.conditionsSize(); + return prefixQuery(cq, new BinaryId(buffer.bytes(), null)); + } + + return null; + } + + @Override + protected Query writeQueryCondition(Query query) { + HugeType type = query.resultType(); + if (!type.isIndex()) { + return query; + } + + ConditionQuery cq = (ConditionQuery) query; + + if (type.isNumericIndex()) { + // Convert range-index/shard-index query to id range query + return this.writeRangeIndexQuery(cq); + } else { + assert type.isSearchIndex() || type.isSecondaryIndex() || + type.isUniqueIndex(); + // Convert secondary-index or search-index query to id query + return this.writeStringIndexQuery(cq); + } + } + + private Query writeStringIndexQuery(ConditionQuery query) { + E.checkArgument(query.allSysprop() && + query.conditionsSize() == 2, + "There should be two conditions: " + + "INDEX_LABEL_ID and FIELD_VALUES" + + "in secondary index query"); + + Id index = query.condition(HugeKeys.INDEX_LABEL_ID); + Object key = query.condition(HugeKeys.FIELD_VALUES); + + E.checkArgument(index != null, "Please specify the index label"); + E.checkArgument(key != null, "Please specify the 
index key"); + + Id prefix = formatIndexId(query.resultType(), index, key, true); + return prefixQuery(query, prefix); + } + + private Query writeRangeIndexQuery(ConditionQuery query) { + Id index = query.condition(HugeKeys.INDEX_LABEL_ID); + E.checkArgument(index != null, "Please specify the index label"); + + List fields = query.syspropConditions(HugeKeys.FIELD_VALUES); + E.checkArgument(!fields.isEmpty(), + "Please specify the index field values"); + + HugeType type = query.resultType(); + Id start = null; + if (query.paging() && !query.page().isEmpty()) { + byte[] position = PageState.fromString(query.page()).position(); + start = new BinaryId(position, null); + } + + RangeConditions range = new RangeConditions(fields); + if (range.keyEq() != null) { + Id id = formatIndexId(type, index, range.keyEq(), true); + if (start == null) { + return new IdPrefixQuery(query, id); + } + E.checkArgument(Bytes.compare(start.asBytes(), id.asBytes()) >= 0, + "Invalid page out of lower bound"); + return new IdPrefixQuery(query, start, id); + } + + Object keyMin = range.keyMin(); + Object keyMax = range.keyMax(); + boolean keyMinEq = range.keyMinEq(); + boolean keyMaxEq = range.keyMaxEq(); + if (keyMin == null) { + E.checkArgument(keyMax != null, + "Please specify at least one condition"); + // Set keyMin to min value + keyMin = NumericUtil.minValueOf(keyMax.getClass()); + keyMinEq = true; + } + + Id min = formatIndexId(type, index, keyMin, false); + if (!keyMinEq) { + /* + * Increase 1 to keyMin, index GT query is a scan with GT prefix, + * inclusiveStart=false will also match index started with keyMin + */ + increaseOne(min.asBytes()); + keyMinEq = true; + } + + if (start == null) { + start = min; + } else { + E.checkArgument(Bytes.compare(start.asBytes(), min.asBytes()) >= 0, + "Invalid page out of lower bound"); + } + + if (keyMax == null) { + keyMax = NumericUtil.maxValueOf(keyMin.getClass()); + keyMaxEq = true; + } + Id max = formatIndexId(type, index, keyMax, false); + if 
(keyMaxEq) { + keyMaxEq = false; + increaseOne(max.asBytes()); + } + return new IdRangeQuery(query, start, keyMinEq, max, keyMaxEq); + } + + private BinaryId writeEdgeId(Id id) { + EdgeId edgeId; + if (id.type() == EDGE) { + edgeId = (EdgeId) id; + } else { + edgeId = EdgeId.parse(id.asString()); + } + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + if (this.enablePartition) { + buffer.writeShort(getPartition(HugeType.EDGE, edgeId.ownerVertexId())); + buffer.writeEdgeId(edgeId); + } else { + buffer.writeEdgeId(edgeId); + } + return new BinaryId(buffer.bytes(), id); + } + + private void writePartitionedId(HugeType type, Id id, BytesBuffer buffer) { + if (this.enablePartition) { + buffer.writeShort(getPartition(type, id)); + buffer.writeId(id); + } else { + buffer.writeId(id); + } + } + + protected short getPartition(HugeType type, Id id) { + return 0; + } + + public BackendEntry parse(BackendEntry originEntry) { + byte[] bytes = originEntry.id().asBytes(); + BinaryBackendEntry parsedEntry = new BinaryBackendEntry(originEntry.type(), bytes, + this.enablePartition); + if (this.enablePartition) { + bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length() + 2, bytes.length); + } else { + bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length(), bytes.length); + } + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + buffer.write(parsedEntry.id().asBytes()); + buffer.write(bytes); + parsedEntry = new BinaryBackendEntry(originEntry.type(), new BinaryId(buffer.bytes(), + BytesBuffer.wrap(buffer.bytes()).readEdgeId())); + + for (BackendEntry.BackendColumn col : originEntry.columns()) { + parsedEntry.column(buffer.bytes(), col.value); + } + return parsedEntry; + } + + private static Query prefixQuery(ConditionQuery query, Id prefix) { + Query newQuery; + if (query.paging() && !query.page().isEmpty()) { + /* + * If used paging and the page number is not empty, deserialize + * the page to id and use it as the starting row for this 
query + */ + byte[] position = PageState.fromString(query.page()).position(); + E.checkArgument(Bytes.compare(position, prefix.asBytes()) >= 0, + "Invalid page out of lower bound"); + BinaryId start = new BinaryId(position, null); + newQuery = new IdPrefixQuery(query, start, prefix); + } else { + newQuery = new IdPrefixQuery(query, prefix); + } + return newQuery; + } + + protected static BinaryId formatIndexId(HugeType type, Id indexLabel, + Object fieldValues, + boolean equal) { + boolean withEnding = type.isRangeIndex() || equal; + Id id = HugeIndex.formatIndexId(type, indexLabel, fieldValues); + if (!type.isNumericIndex() && indexIdLengthExceedLimit(id)) { + id = HugeIndex.formatIndexHashId(type, indexLabel, fieldValues); + } + BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); + byte[] idBytes = buffer.writeIndexId(id, type, withEnding).bytes(); + return new BinaryId(idBytes, id); + } + + protected static boolean indexIdLengthExceedLimit(Id id) { + return id.asBytes().length > BytesBuffer.INDEX_HASH_ID_THRESHOLD; + } + + protected static boolean indexFieldValuesUnmatched(byte[] value, + Object fieldValues) { + if (value != null && value.length > 0 && fieldValues != null) { + if (!StringEncoding.decode(value).equals(fieldValues)) { + return true; + } + } + return false; + } + + public static final byte[] increaseOne(byte[] bytes) { + final byte BYTE_MAX_VALUE = (byte) 0xff; + assert bytes.length > 0; + byte last = bytes[bytes.length - 1]; + if (last != BYTE_MAX_VALUE) { + bytes[bytes.length - 1] += 0x01; + } else { + // Process overflow (like [1, 255] => [2, 0]) + int i = bytes.length - 1; + for (; i > 0 && bytes[i] == BYTE_MAX_VALUE; --i) { + bytes[i] += 0x01; + } + if (bytes[i] == BYTE_MAX_VALUE) { + assert i == 0; + throw new BackendException("Unable to increase bytes: %s", + Bytes.toHex(bytes)); + } + bytes[i] += 0x01; + } + return bytes; + } + + @Override + public BackendEntry writeVertexLabel(VertexLabel vertexLabel) { + SchemaSerializer 
serializer = new SchemaSerializer(); + return serializer.writeVertexLabel(vertexLabel); + } + + @Override + public VertexLabel readVertexLabel(HugeGraph graph, + BackendEntry backendEntry) { + if (backendEntry == null) { + return null; + } + BinaryBackendEntry entry = this.convertEntry(backendEntry); + + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.readVertexLabel(graph, entry); + } + + @Override + public BackendEntry writeEdgeLabel(EdgeLabel edgeLabel) { + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.writeEdgeLabel(edgeLabel); + } + + @Override + public EdgeLabel readEdgeLabel(HugeGraph graph, BackendEntry backendEntry) { + if (backendEntry == null) { + return null; + } + BinaryBackendEntry entry = this.convertEntry(backendEntry); + + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.readEdgeLabel(graph, entry); + } + + @Override + public BackendEntry writePropertyKey(PropertyKey propertyKey) { + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.writePropertyKey(propertyKey); + } + + @Override + public PropertyKey readPropertyKey(HugeGraph graph, + BackendEntry backendEntry) { + if (backendEntry == null) { + return null; + } + BinaryBackendEntry entry = this.convertEntry(backendEntry); + + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.readPropertyKey(graph, entry); + } + + @Override + public BackendEntry writeIndexLabel(IndexLabel indexLabel) { + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.writeIndexLabel(indexLabel); + } + + @Override + public IndexLabel readIndexLabel(HugeGraph graph, + BackendEntry backendEntry) { + if (backendEntry == null) { + return null; + } + BinaryBackendEntry entry = this.convertEntry(backendEntry); + + SchemaSerializer serializer = new SchemaSerializer(); + return serializer.readIndexLabel(graph, entry); + } + + private final class SchemaSerializer { + + private 
BinaryBackendEntry entry; + + public BinaryBackendEntry writeVertexLabel(VertexLabel schema) { + this.entry = newBackendEntry(schema); + writeString(HugeKeys.NAME, schema.name()); + writeEnum(HugeKeys.ID_STRATEGY, schema.idStrategy()); + writeIds(HugeKeys.PROPERTIES, schema.properties()); + writeIds(HugeKeys.PRIMARY_KEYS, schema.primaryKeys()); + writeIds(HugeKeys.NULLABLE_KEYS, schema.nullableKeys()); + writeIds(HugeKeys.INDEX_LABELS, schema.indexLabels()); + writeBool(HugeKeys.ENABLE_LABEL_INDEX, schema.enableLabelIndex()); + writeEnum(HugeKeys.STATUS, schema.status()); + writeLong(HugeKeys.TTL, schema.ttl()); + writeId(HugeKeys.TTL_START_TIME, schema.ttlStartTime()); + writeUserdata(schema); + return this.entry; + } + + public VertexLabel readVertexLabel(HugeGraph graph, + BinaryBackendEntry entry) { + E.checkNotNull(entry, "entry"); + this.entry = entry; + Id id = entry.id().origin(); + String name = readString(HugeKeys.NAME); + + VertexLabel vertexLabel = new VertexLabel(graph, id, name); + vertexLabel.idStrategy(readEnum(HugeKeys.ID_STRATEGY, + IdStrategy.class)); + vertexLabel.properties(readIds(HugeKeys.PROPERTIES)); + vertexLabel.primaryKeys(readIds(HugeKeys.PRIMARY_KEYS)); + vertexLabel.nullableKeys(readIds(HugeKeys.NULLABLE_KEYS)); + vertexLabel.addIndexLabels(readIds(HugeKeys.INDEX_LABELS)); + vertexLabel.enableLabelIndex(readBool(HugeKeys.ENABLE_LABEL_INDEX)); + vertexLabel.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); + vertexLabel.ttl(readLong(HugeKeys.TTL)); + vertexLabel.ttlStartTime(readId(HugeKeys.TTL_START_TIME)); + readUserdata(vertexLabel); + return vertexLabel; + } + + public BinaryBackendEntry writeEdgeLabel(EdgeLabel schema) { + this.entry = newBackendEntry(schema); + writeString(HugeKeys.NAME, schema.name()); + writeId(HugeKeys.SOURCE_LABEL, schema.sourceLabel()); + writeId(HugeKeys.TARGET_LABEL, schema.targetLabel()); + writeEnum(HugeKeys.FREQUENCY, schema.frequency()); + writeIds(HugeKeys.PROPERTIES, schema.properties()); + 
writeIds(HugeKeys.SORT_KEYS, schema.sortKeys()); + writeIds(HugeKeys.NULLABLE_KEYS, schema.nullableKeys()); + writeIds(HugeKeys.INDEX_LABELS, schema.indexLabels()); + writeBool(HugeKeys.ENABLE_LABEL_INDEX, schema.enableLabelIndex()); + writeEnum(HugeKeys.STATUS, schema.status()); + writeLong(HugeKeys.TTL, schema.ttl()); + writeId(HugeKeys.TTL_START_TIME, schema.ttlStartTime()); + writeUserdata(schema); + return this.entry; + } + + public EdgeLabel readEdgeLabel(HugeGraph graph, + BinaryBackendEntry entry) { + E.checkNotNull(entry, "entry"); + this.entry = entry; + Id id = entry.id().origin(); + String name = readString(HugeKeys.NAME); + + EdgeLabel edgeLabel = new EdgeLabel(graph, id, name); + edgeLabel.sourceLabel(readId(HugeKeys.SOURCE_LABEL)); + edgeLabel.targetLabel(readId(HugeKeys.TARGET_LABEL)); + edgeLabel.frequency(readEnum(HugeKeys.FREQUENCY, Frequency.class)); + edgeLabel.properties(readIds(HugeKeys.PROPERTIES)); + edgeLabel.sortKeys(readIds(HugeKeys.SORT_KEYS)); + edgeLabel.nullableKeys(readIds(HugeKeys.NULLABLE_KEYS)); + edgeLabel.addIndexLabels(readIds(HugeKeys.INDEX_LABELS)); + edgeLabel.enableLabelIndex(readBool(HugeKeys.ENABLE_LABEL_INDEX)); + edgeLabel.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); + edgeLabel.ttl(readLong(HugeKeys.TTL)); + edgeLabel.ttlStartTime(readId(HugeKeys.TTL_START_TIME)); + readUserdata(edgeLabel); + return edgeLabel; + } + + public BinaryBackendEntry writePropertyKey(PropertyKey schema) { + this.entry = newBackendEntry(schema); + writeString(HugeKeys.NAME, schema.name()); + writeEnum(HugeKeys.DATA_TYPE, schema.dataType()); + writeEnum(HugeKeys.CARDINALITY, schema.cardinality()); + writeEnum(HugeKeys.AGGREGATE_TYPE, schema.aggregateType()); + writeEnum(HugeKeys.WRITE_TYPE, schema.writeType()); + writeIds(HugeKeys.PROPERTIES, schema.properties()); + writeEnum(HugeKeys.STATUS, schema.status()); + writeUserdata(schema); + return this.entry; + } + + public PropertyKey readPropertyKey(HugeGraph graph, + 
BinaryBackendEntry entry) { + E.checkNotNull(entry, "entry"); + this.entry = entry; + Id id = entry.id().origin(); + String name = readString(HugeKeys.NAME); + + PropertyKey propertyKey = new PropertyKey(graph, id, name); + propertyKey.dataType(readEnum(HugeKeys.DATA_TYPE, DataType.class)); + propertyKey.cardinality(readEnum(HugeKeys.CARDINALITY, + Cardinality.class)); + propertyKey.aggregateType(readEnum(HugeKeys.AGGREGATE_TYPE, + AggregateType.class)); + propertyKey.writeType(readEnumOrDefault(HugeKeys.WRITE_TYPE, + WriteType.class, + WriteType.OLTP)); + propertyKey.properties(readIds(HugeKeys.PROPERTIES)); + propertyKey.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); + readUserdata(propertyKey); + return propertyKey; + } + + public BinaryBackendEntry writeIndexLabel(IndexLabel schema) { + this.entry = newBackendEntry(schema); + writeString(HugeKeys.NAME, schema.name()); + writeEnum(HugeKeys.BASE_TYPE, schema.baseType()); + writeId(HugeKeys.BASE_VALUE, schema.baseValue()); + writeEnum(HugeKeys.INDEX_TYPE, schema.indexType()); + writeIds(HugeKeys.FIELDS, schema.indexFields()); + writeEnum(HugeKeys.STATUS, schema.status()); + writeUserdata(schema); + return this.entry; + } + + public IndexLabel readIndexLabel(HugeGraph graph, + BinaryBackendEntry entry) { + E.checkNotNull(entry, "entry"); + this.entry = entry; + Id id = entry.id().origin(); + String name = readString(HugeKeys.NAME); + + IndexLabel indexLabel = new IndexLabel(graph, id, name); + indexLabel.baseType(readEnum(HugeKeys.BASE_TYPE, HugeType.class)); + indexLabel.baseValue(readId(HugeKeys.BASE_VALUE)); + indexLabel.indexType(readEnum(HugeKeys.INDEX_TYPE, + IndexType.class)); + indexLabel.indexFields(readIds(HugeKeys.FIELDS)); + indexLabel.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); + readUserdata(indexLabel); + return indexLabel; + } + + private void writeUserdata(SchemaElement schema) { + String userdataStr = JsonUtil.toJson(schema.userdata()); + writeString(HugeKeys.USER_DATA, 
userdataStr); + } + + private void readUserdata(SchemaElement schema) { + // Parse all user data of a schema element + byte[] userdataBytes = column(HugeKeys.USER_DATA); + String userdataStr = StringEncoding.decode(userdataBytes); + @SuppressWarnings("unchecked") + Map userdata = JsonUtil.fromJson(userdataStr, + Map.class); + for (Map.Entry e : userdata.entrySet()) { + schema.userdata(e.getKey(), e.getValue()); + } + } + + private void writeString(HugeKeys key, String value) { + this.entry.column(formatColumnName(key), + StringEncoding.encode(value)); + } + + private String readString(HugeKeys key) { + return StringEncoding.decode(column(key)); + } + + private void writeEnum(HugeKeys key, DataType value) { + this.entry.column(formatColumnName(key), new byte[]{value.code()}); + } + + private T readEnum(HugeKeys key, + Class clazz) { + byte[] value = column(key); + E.checkState(value.length == 1, + "The length of column '%s' must be 1, but is '%s'", + key, value.length); + return DataType.fromCode(clazz, value[0]); + } + + private T readEnumOrDefault(HugeKeys key, + Class clazz, + T defaultValue) { + BackendColumn column = this.entry.column(formatColumnName(key)); + if (column == null) { + return defaultValue; + } + E.checkNotNull(column.value, "column.value"); + return DataType.fromCode(clazz, column.value[0]); + } + + private void writeLong(HugeKeys key, long value) { + @SuppressWarnings("resource") + BytesBuffer buffer = new BytesBuffer(8); + buffer.writeVLong(value); + this.entry.column(formatColumnName(key), buffer.bytes()); + } + + private long readLong(HugeKeys key) { + byte[] value = column(key); + BytesBuffer buffer = BytesBuffer.wrap(value); + return buffer.readVLong(); + } + + private void writeId(HugeKeys key, Id value) { + this.entry.column(formatColumnName(key), writeId(value)); + } + + private Id readId(HugeKeys key) { + return readId(column(key)); + } + + private void writeIds(HugeKeys key, Collection value) { + 
this.entry.column(formatColumnName(key), writeIds(value)); + } + + private Id[] readIds(HugeKeys key) { + return readIds(column(key)); + } + + private void writeBool(HugeKeys key, boolean value) { + this.entry.column(formatColumnName(key), + new byte[]{(byte) (value ? 1 : 0)}); + } + + private boolean readBool(HugeKeys key) { + byte[] value = column(key); + E.checkState(value.length == 1, + "The length of column '%s' must be 1, but is '%s'", + key, value.length); + return value[0] != (byte) 0; + } + + private byte[] writeId(Id id) { + int size = 1 + id.length(); + BytesBuffer buffer = BytesBuffer.allocate(size); + buffer.writeId(id); + return buffer.bytes(); + } + + private Id readId(byte[] value) { + BytesBuffer buffer = BytesBuffer.wrap(value); + return buffer.readId(); + } + + private byte[] writeIds(Collection ids) { + E.checkState(ids.size() <= BytesBuffer.UINT16_MAX, + "The number of properties of vertex/edge label " + + "can't exceed '%s'", BytesBuffer.UINT16_MAX); + int size = 2; + for (Id id : ids) { + size += (1 + id.length()); + } + BytesBuffer buffer = BytesBuffer.allocate(size); + buffer.writeUInt16(ids.size()); + for (Id id : ids) { + buffer.writeId(id); + } + return buffer.bytes(); + } + + private Id[] readIds(byte[] value) { + BytesBuffer buffer = BytesBuffer.wrap(value); + int size = buffer.readUInt16(); + Id[] ids = new Id[size]; + for (int i = 0; i < size; i++) { + Id id = buffer.readId(); + ids[i] = id; + } + return ids; + } + + private byte[] column(HugeKeys key) { + BackendColumn column = this.entry.column(formatColumnName(key)); + E.checkState(column != null, "Not found key '%s' from entry %s", + key, this.entry); + E.checkNotNull(column.value, "column.value"); + return column.value; + } + + private byte[] formatColumnName(HugeKeys key) { + Id id = this.entry.id().origin(); + int size = 1 + id.length() + 1; + BytesBuffer buffer = BytesBuffer.allocate(size); + buffer.writeId(id); + buffer.write(key.code()); + return buffer.bytes(); + } + } +} 
package com.baidu.hugegraph.serializer.direct.reuse;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import com.baidu.hugegraph.driver.GraphManager;
import com.baidu.hugegraph.driver.HugeClient;
import com.baidu.hugegraph.driver.SchemaManager;
import com.baidu.hugegraph.serializer.direct.BinaryEntry;
import com.baidu.hugegraph.serializer.direct.RocksDBSerializer;
import com.baidu.hugegraph.structure.GraphElement;
import com.baidu.hugegraph.structure.graph.Edge;
import com.baidu.hugegraph.structure.graph.Vertex;

/**
 * Demo for writing graph elements to RocksDB directly as (rowkey, value)
 * byte pairs built on the client side, bypassing the server's JSON path.
 *
 * We don't construct server-side graph elements; the client-side
 * {@link Vertex}/{@link Edge} structs are serialized to byte arrays instead
 * of JSON. TODO: batch the puts and send them together.
 *
 * @author jin
 */
public class BytesDemo {

    static HugeClient client;
    // Toggle between the direct-bytes path and the normal REST path
    boolean bypassServer = true;
    RocksDBSerializer ser;

    public static void main(String[] args) {
        BytesDemo ins = new BytesDemo();
        ins.initGraph();
    }

    /**
     * Creates the demo schema, then writes sample vertices/edges either
     * directly (bytes) or through the server, and closes the client.
     */
    void initGraph() {
        // If connect failed will throw an exception.
        client = HugeClient.builder("http://localhost:8080", "hugegraph").build();

        SchemaManager schema = client.schema();

        schema.propertyKey("name").asText().ifNotExist().create();
        schema.propertyKey("age").asInt().ifNotExist().create();
        schema.propertyKey("lang").asText().ifNotExist().create();
        schema.propertyKey("date").asDate().ifNotExist().create();
        schema.propertyKey("price").asInt().ifNotExist().create();

        schema.vertexLabel("person")
              .properties("name", "age")
              .primaryKeys("name")
              .ifNotExist()
              .create();

        schema.vertexLabel("person")
              .properties("price")
              .nullableKeys("price")
              .append();

        schema.vertexLabel("software")
              .properties("name", "lang", "price")
              .primaryKeys("name")
              .ifNotExist()
              .create();

        schema.indexLabel("softwareByPrice")
              .onV("software").by("price")
              .range()
              .ifNotExist()
              .create();

        schema.edgeLabel("knows")
              .link("person", "person")
              .properties("date")
              .ifNotExist()
              .create();

        schema.edgeLabel("created")
              .link("person", "software")
              .properties("date")
              .ifNotExist()
              .create();

        schema.indexLabel("createdByDate")
              .onE("created").by("date")
              .secondary()
              .ifNotExist()
              .create();

        ser = new RocksDBSerializer(client);
        writeGraphElements();

        client.close();
    }

    private void writeGraphElements() {
        GraphManager graph = client.graph();

        // Construct some vertices & edges
        Vertex marko = new Vertex("person").property("name", "marko").property("age", 29);
        Vertex vadas = new Vertex("person").property("name", "vadas").property("age", 27);
        Vertex lop = new Vertex("software").property("name", "lop").property("lang", "java")
                                           .property("price", 328);
        Vertex josh = new Vertex("person").property("name", "josh").property("age", 32);
        Vertex ripple = new Vertex("software").property("name", "ripple")
                                              .property("lang", "java")
                                              .property("price", 199);
        Vertex peter = new Vertex("person").property("name", "peter").property("age", 35);

        Edge markoKnowsVadas = new Edge("knows").source(marko).target(vadas)
                                                .property("date", "2016-01-10");
        Edge markoKnowsJosh = new Edge("knows").source(marko).target(josh)
                                               .property("date", "2013-02-20");
        Edge markoCreateLop = new Edge("created").source(marko).target(lop)
                                                 .property("date", "2017-12-10");
        Edge joshCreateRipple = new Edge("created").source(josh).target(ripple)
                                                   .property("date", "2017-12-10");
        Edge joshCreateLop = new Edge("created").source(josh).target(lop)
                                                .property("date", "2009-11-11");
        Edge peterCreateLop = new Edge("created").source(peter).target(lop)
                                                 .property("date", "2017-03-24");

        // FIX: replaced the double-brace ArrayList(){{...}} initialization,
        // which creates an anonymous inner class holding a reference to the
        // enclosing instance, with plain fixed-size lists.
        List<Vertex> vertices = Arrays.asList(marko, vadas, lop, josh, ripple, peter);
        List<Edge> edges = Arrays.asList(markoKnowsVadas, markoKnowsJosh, markoCreateLop,
                                         joshCreateRipple, joshCreateLop, peterCreateLop);

        // Old way: encode to json then send to server
        if (bypassServer) {
            writeDirectly(vertices, edges);
        } else {
            writeByServer(graph, vertices, edges);
        }
    }

    /**
     * Transfers each vertex & edge into byte arrays and sends them to
     * RocksDB one by one.
     * TODO: use a batch and send them together
     */
    void writeDirectly(List<Vertex> vertices, List<Edge> edges) {
        for (Vertex vertex : vertices) {
            // TODO: the serialized entry should become the actual
            // (rowkey, value) pair once RocksDBSerializer is complete
            BinaryEntry entry = ser.writeVertex(vertex);
            byte[] rowkey = getKeyBytes(vertex);
            byte[] values = getValueBytes(vertex);
            sendRpcToRocksDB(rowkey, values);
        }

        for (Edge edge : edges) {
            byte[] rowkey = getKeyBytes(edge);
            byte[] values = getValueBytes(edge);
            sendRpcToRocksDB(rowkey, values);
        }
    }

    /** Builds the RocksDB rowkey for an element from its id and type. */
    byte[] getKeyBytes(GraphElement e) {
        Object id = e.id();
        String type = e.type();
        return id2Bytes(id, type);
    }

    /**
     * Encodes an element id into rowkey bytes.
     *
     * FIX: the original body contained a dangling, unterminated
     * {@code ser.writeVertex()} call (no argument, no semicolon) that did
     * not compile; the encoding is still unimplemented, so both branches
     * are left as explicit TODOs returning null like propertyToBytes().
     */
    byte[] id2Bytes(Object id, String type) {
        byte[] res = null;

        if ("vertex".equals(type)) {
            // TODO: encode the vertex id via the serializer
        } else if ("edge".equals(type)) {
            // TODO: encode the edge id via the serializer
        }

        return res;
    }

    /** Serializes an element's properties into the RocksDB value bytes. */
    byte[] getValueBytes(GraphElement e) {
        Map<String, Object> properties = e.properties();
        return propertyToBytes(properties);
    }

    byte[] propertyToBytes(Map<String, Object> properties) {
        // TODO: implement property serialization
        byte[] res = null;

        return res;
    }

    /** Placeholder for the real RocksDB put(rowkey, values) RPC call. */
    boolean sendRpcToRocksDB(byte[] rowkey, byte[] values) {
        // here we call the rpc
        boolean flag = false;
        //flag = put(rowkey, values);
        return flag;
    }

    void writeByServer(GraphManager graph, List<Vertex> vertices, List<Edge> edges) {
        vertices = graph.addVertices(vertices);
        vertices.forEach(System.out::println);

        edges = graph.addEdges(edges, false);
        edges.forEach(System.out::println);
    }
}
package com.baidu.hugegraph.serializer.direct.struct;

import java.util.Date;
import java.util.UUID;

import com.baidu.hugegraph.serializer.direct.util.HugeException;
import com.baidu.hugegraph.serializer.direct.util.StringEncoding;
import com.baidu.hugegraph.util.CollectionUtil;
import com.baidu.hugegraph.util.DateUtil;
import com.baidu.hugegraph.util.E;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

/**
 * Property-value data types supported by the direct serializer, each with a
 * stable one-byte wire code and the Java class used to represent values.
 */
public enum DataType {

    UNKNOWN(0, "unknown", Object.class),
    OBJECT(1, "object", Object.class),
    BOOLEAN(2, "boolean", Boolean.class),
    BYTE(3, "byte", Byte.class),
    INT(4, "int", Integer.class),
    LONG(5, "long", Long.class),
    FLOAT(6, "float", Float.class),
    DOUBLE(7, "double", Double.class),
    TEXT(8, "text", String.class),
    //BLOB(9, "blob", Blob.class),
    DATE(10, "date", Date.class),
    UUID(11, "uuid", UUID.class);

    private final byte code;
    private final String name;
    private final Class<?> clazz;

    /*
     * FIX: this field initializer must appear BEFORE the static block below.
     * Java runs static initializers in textual order (JLS 12.4.2); the
     * original declared TABLE after the "static { register(...); }" block,
     * so register() dereferenced a null TABLE and the class failed to load
     * with a NullPointerException.
     */
    private static final Table<Class<? extends DataType>, Byte, DataType> TABLE =
                         HashBasedTable.create();

    static {
        register(DataType.class);
    }

    /** Registers every constant of the given enum class by its byte code. */
    static void register(Class<? extends DataType> clazz) {
        Object enums;
        try {
            enums = clazz.getMethod("values").invoke(null);
        } catch (Exception e) {
            throw new HugeException("DataType invalid", e);
        }
        for (DataType e : CollectionUtil.<DataType>toList(enums)) {
            TABLE.put(clazz, e.code(), e);
        }
    }

    /**
     * Looks up the enum constant with the given byte code.
     *
     * @throws IllegalArgumentException if the code is not registered
     */
    static <T extends DataType> T fromCode(Class<T> clazz, byte code) {
        @SuppressWarnings("unchecked")
        T value = (T) TABLE.get(clazz, code);
        if (value == null) {
            E.checkArgument(false, "Can't construct %s from code %s",
                            clazz.getSimpleName(), code);
        }
        return value;
    }

    DataType(int code, String name, Class<?> clazz) {
        assert code < 256;
        this.code = (byte) code;
        this.name = name;
        this.clazz = clazz;
    }

    public byte code() {
        return this.code;
    }

    public String string() {
        return this.name;
    }

    public Class<?> clazz() {
        return this.clazz;
    }

    public boolean isText() {
        return this == DataType.TEXT;
    }

    public boolean isNumber() {
        return this == BYTE || this == INT || this == LONG ||
               this == FLOAT || this == DOUBLE;
    }

    public boolean isNumber4() {
        // Store index value of Byte using 4 bytes
        return this == BYTE || this == INT || this == FLOAT;
    }

    public boolean isNumber8() {
        return this == LONG || this == DOUBLE;
    }

    //public boolean isBlob() {
    //    return this == DataType.BLOB;
    //}

    public boolean isDate() {
        return this == DataType.DATE;
    }

    public boolean isUUID() {
        return this == DataType.UUID;
    }

    /**
     * Coerces a value to the Number subtype of this data type, or returns
     * null when this type is not numeric or the value is not a Number.
     */
    public <V> Number valueToNumber(V value) {
        if (!(this.isNumber() && value instanceof Number)) {
            return null;
        }
        if (this.clazz.isInstance(value)) {
            return (Number) value;
        }

        Number number;
        try {
            switch (this) {
                case BYTE:
                    number = Byte.valueOf(value.toString());
                    break;
                case INT:
                    number = Integer.valueOf(value.toString());
                    break;
                case LONG:
                    number = Long.valueOf(value.toString());
                    break;
                case FLOAT:
                    number = Float.valueOf(value.toString());
                    break;
                case DOUBLE:
                    number = Double.valueOf(value.toString());
                    break;
                default:
                    throw new AssertionError(String.format(
                              "Number type only contains Byte, Integer, " +
                              "Long, Float, Double, but got %s", this.clazz()));
            }
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(String.format(
                      "Can't read '%s' as %s: %s",
                      value, this.name, e.getMessage()));
        }
        return number;
    }

    /**
     * Coerces a value to Date (from Date, epoch int/long, or a parseable
     * String), or returns null when this type is not DATE or the value is
     * of an unsupported class.
     */
    public <V> Date valueToDate(V value) {
        if (!this.isDate()) {
            return null;
        }
        if (value instanceof Date) {
            return (Date) value;
        } else if (value instanceof Integer) {
            return new Date(((Number) value).intValue());
        } else if (value instanceof Long) {
            return new Date(((Number) value).longValue());
        } else if (value instanceof String) {
            return DateUtil.parse((String) value);
        }
        return null;
    }

    /**
     * Coerces a value to UUID (from UUID or String), or returns null when
     * this type is not UUID or the value is of an unsupported class.
     */
    public <V> UUID valueToUUID(V value) {
        if (!this.isUUID()) {
            return null;
        }
        if (value instanceof UUID) {
            return (UUID) value;
        } else if (value instanceof String) {
            return StringEncoding.uuid((String) value);
        }
        return null;
    }

    /** Maps a Java class back to its DataType; throws if none matches. */
    public static DataType fromClass(Class<?> clazz) {
        for (DataType type : DataType.values()) {
            if (type.clazz() == clazz) {
                return type;
            }
        }
        throw new HugeException("Unknown clazz '%s' for DataType", clazz);
    }
}
 */

package com.baidu.hugegraph.serializer.direct.struct;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang3.StringUtils;

import com.baidu.hugegraph.driver.HugeClient;
import com.baidu.hugegraph.serializer.direct.util.DataTypeUtil;
import com.baidu.hugegraph.serializer.direct.util.GraphSchema;
import com.baidu.hugegraph.structure.GraphElement;
import com.baidu.hugegraph.structure.constant.IdStrategy;
import com.baidu.hugegraph.structure.graph.Vertex;
import com.baidu.hugegraph.structure.schema.EdgeLabel;
import com.baidu.hugegraph.structure.schema.PropertyKey;
import com.baidu.hugegraph.structure.schema.SchemaLabel;
import com.baidu.hugegraph.structure.schema.VertexLabel;
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.LongEncoding;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;

/**
 * Base class for building client-side graph elements (vertices/edges) from
 * raw field name/value arrays, resolving schema via {@link GraphSchema}.
 *
 * NOTE(review): this class appears copied from the loader module and is not
 * self-contained here — it references members that are NOT defined in this
 * file: {@code mapping()}, {@code this.struct}, {@code retainField(...)},
 * {@code Constants.*} and {@code InputSource}. It will not compile as-is;
 * confirm against the hugegraph-loader ElementBuilder it was taken from.
 * Also, generic type parameters (e.g. on List/Collection/Map) seem to have
 * been stripped when the patch was captured — TODO confirm and restore.
 */
public abstract class ElementBuilder {

    // Schema cache backed by the client connection
    private final GraphSchema schema;

    // NOTE: CharsetEncoder is not thread safe
    private final CharsetEncoder encoder;
    // Scratch buffer used to enforce the encoded vertex-id length limit
    private final ByteBuffer buffer;

    public ElementBuilder(HugeClient client) {
        this.schema = new GraphSchema(client);
        this.encoder = Charsets.UTF_8.newEncoder();
        this.buffer = ByteBuffer.allocate(128);
    }

    /** Builds graph elements from parallel field-name/value arrays. */
    public abstract List build(String[] names, Object[] values);

    /** The vertex/edge label this builder targets. */
    public abstract SchemaLabel schemaLabel();

    /** Property keys that must be present on every built element. */
    protected abstract Collection nonNullableKeys();

    /** Whether the given raw field holds the element id. */
    protected abstract boolean isIdField(String fieldName);

    // Non-nullable keys = declared properties minus nullable keys
    @SuppressWarnings("unchecked")
    protected Collection nonNullableKeys(SchemaLabel schemaLabel) {
        return CollectionUtils.subtract(schemaLabel.properties(),
                                        schemaLabel.nullableKeys());
    }

    /**
     * Chooses the key/value-pair extractor matching the label's id strategy
     * (customize vs primary-key) and whether multi-values are unfolded.
     */
    protected VertexKVPairs newKVPairs(VertexLabel vertexLabel,
                                       boolean unfold) {
        IdStrategy idStrategy = vertexLabel.idStrategy();
        if (idStrategy.isCustomize()) {
            if (unfold) {
                return new VertexFlatIdKVPairs(vertexLabel);
            } else {
                return new VertexIdKVPairs(vertexLabel);
            }
        } else {
            assert idStrategy.isPrimaryKey();
            if (unfold) {
                return new VertexFlatPkKVPairs(vertexLabel);
            } else {
                return new VertexPkKVPairs(vertexLabel);
            }
        }
    }

    protected void addProperty(GraphElement element, String key, Object value) {
        this.addProperty(element, key, value, true);
    }

    /**
     * Sets one property, optionally converting the raw value to the
     * property key's declared data type first.
     */
    protected void addProperty(GraphElement element, String key, Object value,
                               boolean needConvert) {
        if (needConvert) {
            value = this.convertPropertyValue(key, value);
        }
        element.property(key, value);
    }

    /** Validates, converts and sets all of the given properties. */
    protected void addProperties(GraphElement element,
                                 Map properties) {
        for (Map.Entry entry : properties.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            this.checkFieldValue(key, value);
            value = this.convertPropertyValue(key, value);

            element.property(key, value);
        }
    }

    /** Fails if any non-nullable property key is missing on the element. */
    protected void checkNonNullableKeys(GraphElement element) {
        Set keys = element.properties().keySet();
        // Check whether passed all non-null property
        Collection requiredKeys = this.nonNullableKeys();
        if (!keys.containsAll(requiredKeys)) {
            @SuppressWarnings("unchecked")
            Collection missed = CollectionUtils.subtract(requiredKeys,
                                                         keys);
            E.checkArgument(false, "All non-null property keys %s of '%s' " +
                            "must be setted, but missed keys %s",
                            requiredKeys, this.schemaLabel().name(), missed);
        }
    }

    protected PropertyKey getPropertyKey(String name) {
        return this.schema.getPropertyKey(name);
    }

    protected VertexLabel getVertexLabel(String name) {
        return this.schema.getVertexLabel(name);
    }

    protected EdgeLabel getEdgeLabel(String name) {
        return this.schema.getEdgeLabel(name);
    }

    /**
     * Applies the configured value mapping for a field, if any.
     * NOTE(review): mapping() is not defined in this class — see class note.
     */
    protected Object mappingValue(String fieldName, Object fieldValue) {
        if (this.mapping().mappingValues().isEmpty()) {
            return fieldValue;
        }
        String fieldStrValue = String.valueOf(fieldValue);
        return this.mapping().mappingValue(fieldName, fieldStrValue);
    }

    /**
     * Assigns a customized id (string/number/uuid per id strategy) to the
     * vertex, converting the raw id value as needed.
     */
    private void customizeId(VertexLabel vertexLabel, Vertex vertex,
                             String idField, Object idValue) {
        E.checkArgumentNotNull(idField, "The vertex id field can't be null");
        E.checkArgumentNotNull(idValue, "The vertex id value can't be null");
        IdStrategy idStrategy = vertexLabel.idStrategy();
        if (idStrategy.isCustomizeString()) {
            String id = (String) idValue;
            this.checkVertexIdLength(id);
            vertex.id(id);
        } else if (idStrategy.isCustomizeNumber()) {
            Long id = DataTypeUtil.parseNumber(idField, idValue);
            vertex.id(id);
        } else {
            assert idStrategy.isCustomizeUuid();
            UUID id = DataTypeUtil.parseUUID(idField, idValue);
            vertex.id(id);
        }
    }

    // NOTE(review): this.struct is not defined in this class — see class note
    private Object convertPropertyValue(String key, Object rawValue) {
        PropertyKey propertyKey = this.getPropertyKey(key);
        InputSource inputSource = this.struct.input();
        return DataTypeUtil.convert(rawValue, propertyKey, inputSource);
    }

    /** Validates mapped field values are non-null simple (scalar) types. */
    private void checkFieldValue(String fieldName, Object fieldValue) {
        if (this.mapping().mappingValues().isEmpty() ||
            !this.mapping().mappingValues().containsKey(fieldName)) {
            return;
        }
        // NOTE: The nullable values has been filtered before this
        E.checkArgument(fieldValue != null, "The field value can't be null");
        E.checkArgument(DataTypeUtil.isSimpleValue(fieldValue),
                        "The field value must be simple type, actual is '%s'",
                        fieldValue.getClass());
    }

    // Only string-customized ids can be "empty"; other strategies never are
    private boolean vertexIdEmpty(VertexLabel vertexLabel, Vertex vertex) {
        IdStrategy idStrategy = vertexLabel.idStrategy();
        if (idStrategy.isCustomizeString()) {
            Object vertexId = vertex.id();
            return vertexId == null || StringUtils.isEmpty((String) vertexId);
        }
        return false;
    }

    /** Ensures every primary-key value is present (count and non-null). */
    private void checkPrimaryValuesValid(VertexLabel vertexLabel,
                                         Object[] primaryValues) {
        List primaryKeys = vertexLabel.primaryKeys();
        E.checkArgument(primaryKeys.size() == primaryValues.length,
                        "Missing some primary key values, expect %s, " +
                        "but only got %s for vertex label '%s'",
                        primaryKeys, Arrays.toString(primaryValues),
                        vertexLabel);
        for (int i = 0; i < primaryKeys.size(); i++) {
            E.checkArgument(primaryValues[i] != null,
                            "Make sure the value of the primary key '%s' is " +
                            "not empty, or check whether the headers or " +
                            "field_mapping are configured correctly",
                            primaryKeys.get(i));
        }
    }

    /**
     * Splices a primary-key vertex id as "<label-id>:pk1!pk2!..." with
     * numbers/dates long-encoded and special chars escaped.
     * NOTE(review): Constants.SEARCH_LIST/TARGET_LIST are not defined here.
     */
    private String spliceVertexId(VertexLabel vertexLabel,
                                  Object... primaryValues) {
        StringBuilder vertexId = new StringBuilder();
        StringBuilder vertexKeysId = new StringBuilder();
        for (int i = 0; i < primaryValues.length; i++) {
            Object value = primaryValues[i];
            String pkValue;
            if (value instanceof Number || value instanceof Date) {
                pkValue = LongEncoding.encodeNumber(value);
            } else {
                pkValue = String.valueOf(value);
            }
            if (StringUtils.containsAny(pkValue, Constants.SEARCH_LIST)) {
                pkValue = StringUtils.replaceEach(pkValue,
                                                  Constants.SEARCH_LIST,
                                                  Constants.TARGET_LIST);
            }
            vertexKeysId.append(pkValue);
            vertexKeysId.append("!");
        }
        vertexId.append(vertexLabel.id()).append(":").append(vertexKeysId);
        vertexId.deleteCharAt(vertexId.length() - 1);
        return vertexId.toString();
    }

    /**
     * Rejects ids whose UTF-8 encoding exceeds the fixed 128-byte scratch
     * buffer (an overflowing encode result means the id is too long).
     */
    private void checkVertexIdLength(String id) {
        this.encoder.reset();
        this.buffer.clear();
        CoderResult r = this.encoder.encode(CharBuffer.wrap(id.toCharArray()),
                                            this.buffer, true);
        E.checkArgument(r.isUnderflow(),
                        "The vertex id length exceeds limit %s : '%s'",
                        Constants.VERTEX_ID_LIMIT, id);
    }

    // A pk value is "empty" when null or a zero-length string
    private boolean isEmptyPkValue(Object pkValue) {
        if (pkValue == null) {
            return true;
        }
        if (pkValue instanceof String) {
            String pkValueStr = (String) pkValue;
            return pkValueStr.isEmpty();
        }
        return false;
    }

    /**
     * Extracted id/primary-key values plus general properties for one row,
     * used to build one or more vertices.
     */
    public abstract class VertexKVPairs {

        public final VertexLabel vertexLabel;
        // General properties
        public Map properties;

        public VertexKVPairs(VertexLabel vertexLabel) {
            this.vertexLabel = vertexLabel;
            this.properties = null;
        }

        /** Extracts id/pk and properties from a vertex input row. */
        public abstract void extractFromVertex(String[] names,
                                               Object[] values);

        /** Extracts the id/pk fields referenced by an edge's endpoint. */
        public abstract void extractFromEdge(String[] names, Object[] values,
                                             int[] fieldIndexes);

        /** Builds the vertices; withProperty=false builds id-only stubs. */
        public abstract List buildVertices(boolean withProperty);

        public List splitField(String key, Object value) {
            return DataTypeUtil.splitField(key, value, struct.input());
        }
    }

    /** Customized-id extractor: one id field, one value, one vertex. */
    public class VertexIdKVPairs extends VertexKVPairs {

        // The idField(raw field), like: id
        private String idField;
        // The single idValue(mapped), like: A -> 1
        private Object idValue;

        public VertexIdKVPairs(VertexLabel vertexLabel) {
            super(vertexLabel);
        }

        @Override
        public void extractFromVertex(String[] names, Object[] values) {
            // General properties
            this.properties = new HashMap<>();
            for (int i = 0; i < names.length; i++) {
                String fieldName = names[i];
                Object fieldValue = values[i];
                if (!retainField(fieldName, fieldValue)) {
                    continue;
                }
                if (isIdField(fieldName)) {
                    this.idField = fieldName;
                    this.idValue = mappingValue(fieldName, fieldValue);
                } else {
                    String key = mapping().mappingField(fieldName);
                    Object value = mappingValue(fieldName, fieldValue);
                    this.properties.put(key, value);
                }
            }
        }

        @Override
        public void extractFromEdge(String[] names, Object[] values,
                                    int[] fieldIndexes) {
            assert fieldIndexes.length == 1;
            String fieldName = names[fieldIndexes[0]];
            Object fieldValue = values[fieldIndexes[0]];
            this.idField = fieldName;
            this.idValue = mappingValue(fieldName, fieldValue);
        }

        @Override
        public List buildVertices(boolean withProperty) {
            Vertex vertex = new Vertex(vertexLabel.name());
            customizeId(vertexLabel, vertex, this.idField, this.idValue);
            // Empty string ids are skipped rather than rejected
            if (vertexIdEmpty(vertexLabel, vertex)) {
                return ImmutableList.of();
            }
            if (withProperty) {
                String key = mapping().mappingField(this.idField);
                // The id field is also used as a general property
                if (vertexLabel.properties().contains(key)) {
                    addProperty(vertex, key, this.idValue);
                }
                addProperties(vertex, this.properties);
                checkNonNullableKeys(vertex);
            }
            return ImmutableList.of(vertex);
        }
    }

    /** Customized-id extractor with unfold: one id field, many values. */
    public class VertexFlatIdKVPairs extends VertexKVPairs {

        // The idField(raw field), like: id
        private String idField;
        /*
         * The multiple idValues(spilted and mapped)
         * like: A|B|C -> [1,2,3]
         */
        private List idValues;

        public VertexFlatIdKVPairs(VertexLabel vertexLabel) {
            super(vertexLabel);
        }

        @Override
        public void extractFromVertex(String[] names, Object[] values) {
            // General properties
            this.properties = new HashMap<>();
            for (int i = 0; i < names.length; i++) {
                String fieldName = names[i];
                Object fieldValue = values[i];
                if (!retainField(fieldName, fieldValue)) {
                    continue;
                }
                if (isIdField(fieldName)) {
                    this.idField = fieldName;
                    // Split the raw value then map each piece individually
                    List rawIdValues = splitField(fieldName,
                                                  fieldValue);
                    this.idValues = rawIdValues.stream().map(rawIdValue -> {
                        return mappingValue(fieldName, rawIdValue);
                    }).collect(Collectors.toList());
                } else {
                    String key = mapping().mappingField(fieldName);
                    Object value = mappingValue(fieldName, fieldValue);
                    this.properties.put(key, value);
                }
            }
        }

        @Override
        public void extractFromEdge(String[] names, Object[] values,
                                    int[] fieldIndexes) {
            assert fieldIndexes.length == 1;
            String fieldName = names[fieldIndexes[0]];
            Object fieldValue = values[fieldIndexes[0]];
            this.idField = fieldName;
            List rawIdValues = splitField(fieldName, fieldValue);
            this.idValues = rawIdValues.stream().map(rawIdValue -> {
                return mappingValue(fieldName, rawIdValue);
            }).collect(Collectors.toList());
        }

        @Override
        public List buildVertices(boolean withProperty) {
            // One vertex per unfolded id value; all share the same properties
            List vertices = new ArrayList<>(this.idValues.size());
            for (Object idValue : this.idValues) {
                Vertex vertex = new Vertex(vertexLabel.name());
                customizeId(vertexLabel, vertex, this.idField, idValue);
                if (vertexIdEmpty(vertexLabel, vertex)) {
                    continue;
                }
                if (withProperty) {
                    String key = mapping().mappingField(this.idField);
                    // The id field is also used as a general property
                    if (vertexLabel.properties().contains(key)) {
                        addProperty(vertex, key, idValue);
                    }
                    addProperties(vertex, this.properties);
                    checkNonNullableKeys(vertex);
                }
                vertices.add(vertex);
            }
            return vertices;
        }
    }

    /** Primary-key extractor: multiple pk fields spliced into one id. */
    public class VertexPkKVPairs extends VertexKVPairs {

        /*
         * The primary key names(mapped), allowed multiple
         * like: [p_name,p_age] -> [name,age]
         */
        private List pkNames;
        /*
         * The primary values(mapped), length is the same as pkNames
         * like: [m,2] -> [marko,18]
         */
        private Object[] pkValues;

        public VertexPkKVPairs(VertexLabel vertexLabel) {
            super(vertexLabel);
        }

        @Override
        public void extractFromVertex(String[] names, Object[] values) {
            List primaryKeys = this.vertexLabel.primaryKeys();
            this.pkNames = primaryKeys;
            this.pkValues = new Object[primaryKeys.size()];
            // General properties
            this.properties = new HashMap<>();
            for (int i = 0; i < names.length; i++) {
                String fieldName = names[i];
                Object fieldValue = values[i];
                if (!retainField(fieldName, fieldValue)) {
                    continue;
                }
                String key = mapping().mappingField(fieldName);
                if (primaryKeys.contains(key)) {
                    // Don't put primary key/values into general properties
                    int index = primaryKeys.indexOf(key);
                    Object pkValue = mappingValue(fieldName, fieldValue);
                    this.pkValues[index] = pkValue;
                } else {
                    Object value = mappingValue(fieldName, fieldValue);
                    this.properties.put(key, value);
                }
            }
        }

        @Override
        public void extractFromEdge(String[] names, Object[] values,
                                    int[] fieldIndexes) {
            this.pkNames = new ArrayList<>(fieldIndexes.length);
            for (int fieldIndex : fieldIndexes) {
                String fieldName = names[fieldIndex];
                String mappingField = mapping().mappingField(fieldName);
                this.pkNames.add(mappingField);
            }
            List primaryKeys = this.vertexLabel.primaryKeys();
            // Mapped field order must exactly match the declared pk order
            E.checkArgument(ListUtils.isEqualList(this.pkNames, primaryKeys),
                            "Make sure the the primary key fields %s are " +
                            "not empty, or check whether the headers or " +
                            "field_mapping are configured correctly",
                            primaryKeys);
            this.pkValues = new Object[this.pkNames.size()];
            for (int i = 0; i < fieldIndexes.length; i++) {
                String fieldName = names[fieldIndexes[i]];
                Object fieldValue = values[fieldIndexes[i]];
                Object pkValue = mappingValue(fieldName, fieldValue);
                this.pkValues[i] = pkValue;
            }
        }

        @Override
        public List buildVertices(boolean withProperty) {
            checkPrimaryValuesValid(vertexLabel, this.pkValues);
            for (int i = 0; i < this.pkNames.size(); i++) {
                // Any empty pk value aborts the whole build for this row
                if (isEmptyPkValue(this.pkValues[i])) {
                    return ImmutableList.of();
                }
                Object pkValue = convertPropertyValue(this.pkNames.get(i),
                                                      this.pkValues[i]);
                this.pkValues[i] = pkValue;
            }
            String id = spliceVertexId(vertexLabel, this.pkValues);
            checkVertexIdLength(id);

            Vertex vertex = new Vertex(vertexLabel.name());
            // NOTE: withProperty is true means that parsing vertex
            if (withProperty) {
                for (int i = 0; i < this.pkNames.size(); i++) {
                    // Already converted above, so skip re-conversion
                    addProperty(vertex, this.pkNames.get(i),
                                this.pkValues[i], false);
                }
                addProperties(vertex, this.properties);
                checkNonNullableKeys(vertex);
            }
            vertex.id(id);
            return ImmutableList.of(vertex);
        }
    }

    /** Primary-key extractor with unfold: one pk field, many values. */
    public class VertexFlatPkKVPairs extends VertexKVPairs {

        /*
         * The primary key name(mapped), must be single
         * like: p_name -> name
         */
        private String pkName;
        /*
         * The primary values(splited and mapped)
         * like: m|v -> [marko,vadas]
         */
        private List pkValues;

        public VertexFlatPkKVPairs(VertexLabel vertexLabel) {
            super(vertexLabel);
        }

        @Override
        public void extractFromVertex(String[] names, Object[] values) {
            List primaryKeys = vertexLabel.primaryKeys();
            E.checkArgument(primaryKeys.size() == 1,
                            "In case unfold is true, just supported " +
                            "a single primary key");
            this.pkName = primaryKeys.get(0);
            // General properties
            this.properties = new HashMap<>();
            boolean handledPk = false;
            for (int i = 0; i < names.length; i++) {
                String fieldName = names[i];
                Object fieldValue = values[i];
                if (!retainField(fieldName, fieldValue)) {
                    continue;
                }
                String key = mapping().mappingField(fieldName);
                if (!handledPk && primaryKeys.contains(key)) {
                    // Don't put primary key/values into general properties
                    List rawPkValues = splitField(fieldName,
                                                  fieldValue);
                    this.pkValues = rawPkValues.stream().map(rawPkValue -> {
                        return mappingValue(fieldName, rawPkValue);
                    }).collect(Collectors.toList());
                    handledPk = true;
                } else {
                    Object value = mappingValue(fieldName, fieldValue);
                    this.properties.put(key, value);
                }
            }
        }

        @Override
        public void extractFromEdge(String[] names, Object[] values,
                                    int[] fieldIndexes) {
            List primaryKeys = vertexLabel.primaryKeys();
            E.checkArgument(fieldIndexes.length == 1 && primaryKeys.size() == 1,
                            "In case unfold is true, just supported " +
                            "a single primary key");
            String fieldName = names[fieldIndexes[0]];
            this.pkName = mapping().mappingField(fieldName);
            String primaryKey = primaryKeys.get(0);
            E.checkArgument(this.pkName.equals(primaryKey),
                            "Make sure the the primary key field '%s' is " +
                            "not empty, or check whether the headers or " +
                            "field_mapping are configured correctly",
                            primaryKey);
            Object fieldValue = values[fieldIndexes[0]];
            List rawPkValues = splitField(fieldName, fieldValue);
            this.pkValues = rawPkValues.stream().map(rawPkValue -> {
                return mappingValue(fieldName, rawPkValue);
            }).collect(Collectors.toList());
        }

        @Override
        public List buildVertices(boolean withProperty) {
            E.checkArgument(this.pkValues != null,
                            "The primary values shouldn't be null");
            // One vertex per unfolded pk value; empty values are skipped
            List vertices = new ArrayList<>(this.pkValues.size());
            for (Object pkValue : this.pkValues) {
                if (isEmptyPkValue(pkValue)) {
                    continue;
                }
                pkValue = convertPropertyValue(this.pkName, pkValue);
                String id = spliceVertexId(vertexLabel, pkValue);
                checkVertexIdLength(id);

                Vertex vertex = new Vertex(vertexLabel.name());
                // NOTE: withProperty is true means that parsing vertex
                if (withProperty) {
                    addProperty(vertex, this.pkName, pkValue, false);
                    addProperties(vertex, this.properties);
                    checkNonNullableKeys(vertex);
                }
                vertex.id(id);
                vertices.add(vertex);
            }
            return vertices;
        }
    }
}
+ */ + +package com.baidu.hugegraph.serializer.direct.struct; + +import java.util.HashMap; +import java.util.Map; + +public enum HugeType { + + UNKNOWN(0, "UNKNOWN"), + + /* Schema types */ + VERTEX_LABEL(1, "VL"), + EDGE_LABEL(2, "EL"), + PROPERTY_KEY(3, "PK"), + INDEX_LABEL(4, "IL"), + + META(40, "M"), + COUNTER(50, "C"), + + /* Data types */ + VERTEX(101, "V"), + // System meta + SYS_PROPERTY(102, "S"), + // Property + PROPERTY(103, "U"), + // Vertex aggregate property + AGGR_PROPERTY_V(104, "VP"), + // Edge aggregate property + AGGR_PROPERTY_E(105, "EP"), + // Olap property + OLAP(106, "AP"), + // Edge + EDGE(120, "E"), + // Edge's direction is OUT for the specified vertex + EDGE_OUT(130, "O"), + // Edge's direction is IN for the specified vertex + EDGE_IN(140, "I"), + + SECONDARY_INDEX(150, "SI"), + VERTEX_LABEL_INDEX(151, "VI"), + EDGE_LABEL_INDEX(152, "EI"), + RANGE_INT_INDEX(160, "II"), + RANGE_FLOAT_INDEX(161, "FI"), + RANGE_LONG_INDEX(162, "LI"), + RANGE_DOUBLE_INDEX(163, "DI"), + SEARCH_INDEX(170, "AI"), + SHARD_INDEX(175, "HI"), + UNIQUE_INDEX(178, "UI"), + + TASK(180, "T"), + + // System schema + SYS_SCHEMA(250, "SS"), + + MAX_TYPE(255, "~"); + + private byte type = 0; + private String name; + + private static final Map ALL_NAME; + + static { + ALL_NAME = new HashMap<>(); + for (HugeType type : values()) { + ALL_NAME.put(type.name, type); + } + } + + HugeType(int type, String name) { + assert type < 256; + this.type = (byte) type; + this.name = name; + } + + public byte code() { + return this.type; + } + + public String string() { + return this.name; + } + + public String readableName() { + return this.name().replace('_', ' ').toLowerCase(); + } + + public boolean isSchema() { + return this == HugeType.VERTEX_LABEL || + this == HugeType.EDGE_LABEL || + this == HugeType.PROPERTY_KEY || + this == HugeType.INDEX_LABEL; + } + + public boolean isGraph() { + return this.isVertex() || this.isEdge(); + } + + public boolean isVertex() { + return this == 
HugeType.VERTEX; + } + + public boolean isEdge() { + return this == EDGE || this == EDGE_OUT || this == EDGE_IN; + } + + public boolean isIndex() { + return this == VERTEX_LABEL_INDEX || this == EDGE_LABEL_INDEX || + this == SECONDARY_INDEX || this == SEARCH_INDEX || + this == RANGE_INT_INDEX || this == RANGE_FLOAT_INDEX || + this == RANGE_LONG_INDEX || this == RANGE_DOUBLE_INDEX || + this == SHARD_INDEX || this == UNIQUE_INDEX; + } + + public boolean isStringIndex() { + return this == VERTEX_LABEL_INDEX || this == EDGE_LABEL_INDEX || + this == SECONDARY_INDEX || this == SEARCH_INDEX || + this == SHARD_INDEX || this == UNIQUE_INDEX; + } + + public boolean isNumericIndex() { + return this == RANGE_INT_INDEX || this == RANGE_FLOAT_INDEX || + this == RANGE_LONG_INDEX || this == RANGE_DOUBLE_INDEX || + this == SHARD_INDEX; + } + + public boolean isSecondaryIndex() { + return this == VERTEX_LABEL_INDEX || this == EDGE_LABEL_INDEX || + this == SECONDARY_INDEX; + } + + public boolean isSearchIndex() { + return this == SEARCH_INDEX; + } + + public boolean isRangeIndex() { + return this == RANGE_INT_INDEX || this == RANGE_FLOAT_INDEX || + this == RANGE_LONG_INDEX || this == RANGE_DOUBLE_INDEX; + } + + public boolean isRange4Index() { + return this == RANGE_INT_INDEX || this == RANGE_FLOAT_INDEX; + } + + public boolean isRange8Index() { + return this == RANGE_LONG_INDEX || this == RANGE_DOUBLE_INDEX; + } + + public boolean isShardIndex() { + return this == SHARD_INDEX; + } + + public boolean isUniqueIndex() { + return this == UNIQUE_INDEX; + } + + public boolean isVertexAggregateProperty() { + return this == AGGR_PROPERTY_V; + } + + public boolean isEdgeAggregateProperty() { + return this == AGGR_PROPERTY_E; + } + + public boolean isAggregateProperty() { + return this.isVertexAggregateProperty() || + this.isEdgeAggregateProperty(); + } + + public static HugeType fromString(String type) { + return ALL_NAME.get(type); + } +} diff --git 
a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java new file mode 100644 index 000000000..75884aae5 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java @@ -0,0 +1,909 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.serializer.direct.util; + +import static com.baidu.hugegraph.serializer.direct.BinaryEntry.BinaryId; +import static com.baidu.hugegraph.serializer.direct.util.Id.UUID_LENGTH; + +import java.io.OutputStream; +import java.nio.Buffer; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Collection; +import java.util.Date; +import java.util.UUID; + +import com.baidu.hugegraph.backend.id.EdgeId; +import com.baidu.hugegraph.backend.id.Id.IdType; +import com.baidu.hugegraph.backend.id.IdGenerator; +import com.baidu.hugegraph.serializer.direct.struct.DataType; +import com.baidu.hugegraph.serializer.direct.struct.HugeType; +import com.baidu.hugegraph.serializer.direct.util.Id.IdType; +import com.baidu.hugegraph.structure.constant.Cardinality; +import com.baidu.hugegraph.structure.schema.PropertyKey; +import com.baidu.hugegraph.util.Bytes; +import com.baidu.hugegraph.util.E; + +/** + * class BytesBuffer is an util for read/write binary + */ +public final class BytesBuffer extends OutputStream { + + public static final int BYTE_LEN = Byte.BYTES; + public static final int SHORT_LEN = Short.BYTES; + public static final int INT_LEN = Integer.BYTES; + public static final int LONG_LEN = Long.BYTES; + public static final int CHAR_LEN = Character.BYTES; + public static final int FLOAT_LEN = Float.BYTES; + public static final int DOUBLE_LEN = Double.BYTES; + public static final int BLOB_LEN = 4; + + public static final int UINT8_MAX = ((byte) -1) & 0xff; + public static final int UINT16_MAX = ((short) -1) & 0xffff; + public static final long UINT32_MAX = (-1) & 0xffffffffL; + + // NOTE: +1 to let code 0 represent length 1 + public static final int ID_LEN_MASK = 0x7f; + public static final int ID_LEN_MAX = 0x7f + 1; // 128 + public static final int BIG_ID_LEN_MAX = 0x7fff + 1; // 32768 + + public static final byte STRING_ENDING_BYTE = (byte) 0x00; + public static final byte STRING_ENDING_BYTE_FF = (byte) 0xff; + public static 
final int STRING_LEN_MAX = UINT16_MAX; + public static final long BLOB_LEN_MAX = 1 * Bytes.GB; + + // The value must be in range [8, ID_LEN_MAX] + public static final int INDEX_HASH_ID_THRESHOLD = 32; + + public static final int DEFAULT_CAPACITY = 64; + public static final int MAX_BUFFER_CAPACITY = 128 * 1024 * 1024; // 128M + + public static final int BUF_EDGE_ID = 128; + public static final int BUF_PROPERTY = 64; + + public static final byte[] BYTES_EMPTY = new byte[0]; + + private ByteBuffer buffer; + private final boolean resize; + + public BytesBuffer() { + this(DEFAULT_CAPACITY); + } + + public BytesBuffer(int capacity) { + E.checkArgument(capacity <= MAX_BUFFER_CAPACITY, + "Capacity exceeds max buffer capacity: %s", + MAX_BUFFER_CAPACITY); + this.buffer = ByteBuffer.allocate(capacity); + this.resize = true; + } + + public BytesBuffer(ByteBuffer buffer) { + E.checkNotNull(buffer, "buffer"); + this.buffer = buffer; + this.resize = false; + } + + public static BytesBuffer allocate(int capacity) { + return new BytesBuffer(capacity); + } + + public static BytesBuffer wrap(ByteBuffer buffer) { + return new BytesBuffer(buffer); + } + + public static BytesBuffer wrap(byte[] array) { + return new BytesBuffer(ByteBuffer.wrap(array)); + } + + public static BytesBuffer wrap(byte[] array, int offset, int length) { + return new BytesBuffer(ByteBuffer.wrap(array, offset, length)); + } + + public ByteBuffer asByteBuffer() { + return this.buffer; + } + + public BytesBuffer forReadWritten() { + ((Buffer) this.buffer).flip(); + return this; + } + + public BytesBuffer forReadAll() { + this.buffer.position(this.buffer.limit()); + return this; + } + + public byte[] array() { + return this.buffer.array(); + } + + public byte[] bytes() { + byte[] bytes = this.buffer.array(); + int position = this.buffer.position(); + if (position == bytes.length) { + return bytes; + } else { + return Arrays.copyOf(bytes, position); + } + } + + public int position() { + return 
this.buffer.position(); + } + + public BytesBuffer copyFrom(BytesBuffer other) { + this.write(other.bytes()); + return this; + } + + public int remaining() { + return this.buffer.remaining(); + } + + private void require(int size) { + // Does need to resize? + if (this.buffer.limit() - this.buffer.position() >= size) { + return; + } + // Can't resize for wrapped buffer since will change the origin ref + E.checkState(this.resize, "Can't resize for wrapped buffer"); + + // Extra capacity as buffer + int newcapacity = size + this.buffer.limit() + DEFAULT_CAPACITY; + E.checkArgument(newcapacity <= MAX_BUFFER_CAPACITY, + "Capacity exceeds max buffer capacity: %s", + MAX_BUFFER_CAPACITY); + ByteBuffer newBuffer = ByteBuffer.allocate(newcapacity); + this.buffer.flip(); + newBuffer.put(this.buffer); + this.buffer = newBuffer; + } + + public BytesBuffer write(byte val) { + require(BYTE_LEN); + this.buffer.put(val); + return this; + } + + @Override + public void write(int val) { + assert val <= UINT8_MAX; + require(BYTE_LEN); + this.buffer.put((byte) val); + } + + @Override + public void write(byte[] val) { + require(BYTE_LEN * val.length); + this.buffer.put(val); + } + + @Override + public void write(byte[] val, int offset, int length) { + require(BYTE_LEN * length); + this.buffer.put(val, offset, length); + } + + public BytesBuffer writeBoolean(boolean val) { + this.write(val ? 
1 : 0); + return this; + } + + public BytesBuffer writeChar(char val) { + require(CHAR_LEN); + this.buffer.putChar(val); + return this; + } + + public BytesBuffer writeShort(short val) { + require(SHORT_LEN); + this.buffer.putShort(val); + return this; + } + + public BytesBuffer writeInt(int val) { + require(INT_LEN); + this.buffer.putInt(val); + return this; + } + + public BytesBuffer writeLong(long val) { + require(LONG_LEN); + this.buffer.putLong(val); + return this; + } + + public BytesBuffer writeFloat(float val) { + require(FLOAT_LEN); + this.buffer.putFloat(val); + return this; + } + + public BytesBuffer writeDouble(double val) { + require(DOUBLE_LEN); + this.buffer.putDouble(val); + return this; + } + + public byte peek() { + return this.buffer.get(this.buffer.position()); + } + + public byte peekLast() { + return this.buffer.get(this.buffer.capacity() - 1); + } + + public byte read() { + return this.buffer.get(); + } + + public byte[] read(int length) { + byte[] bytes = new byte[length]; + this.buffer.get(bytes); + return bytes; + } + + public boolean readBoolean() { + return this.buffer.get() == 0 ? 
false : true; + } + + public char readChar() { + return this.buffer.getChar(); + } + + public short readShort() { + return this.buffer.getShort(); + } + + public int readInt() { + return this.buffer.getInt(); + } + + public long readLong() { + return this.buffer.getLong(); + } + + public float readFloat() { + return this.buffer.getFloat(); + } + + public double readDouble() { + return this.buffer.getDouble(); + } + + public BytesBuffer writeBytes(byte[] bytes) { + E.checkArgument(bytes.length <= UINT16_MAX, + "The max length of bytes is %s, but got %s", + UINT16_MAX, bytes.length); + require(SHORT_LEN + bytes.length); + this.writeVInt(bytes.length); + this.write(bytes); + return this; + } + + public byte[] readBytes() { + int length = this.readVInt(); + assert length >= 0; + return this.read(length); + } + + public BytesBuffer writeBigBytes(byte[] bytes) { + E.checkArgument(bytes.length <= BLOB_LEN_MAX, + "The max length of bytes is %s, but got %s", + BLOB_LEN_MAX, bytes.length); + require(BLOB_LEN + bytes.length); + this.writeVInt(bytes.length); + this.write(bytes); + return this; + } + + public byte[] readBigBytes() { + int length = this.readVInt(); + assert length >= 0; + return this.read(length); + } + + public BytesBuffer writeStringRaw(String val) { + this.write(StringEncoding.encode(val)); + return this; + } + + public BytesBuffer writeString(String val) { + byte[] bytes = StringEncoding.encode(val); + this.writeBytes(bytes); + return this; + } + + public String readString() { + return StringEncoding.decode(this.readBytes()); + } + + public BytesBuffer writeStringWithEnding(String value) { + if (!value.isEmpty()) { + byte[] bytes = StringEncoding.encode(value); + /* + * assert '0x00'/'0xFF' not exist in string index id + * NOTE: + * 0x00 is NULL in UTF8(or ASCII) bytes + * 0xFF is not a valid byte in UTF8 bytes + */ + assert !Bytes.contains(bytes, STRING_ENDING_BYTE_FF) : + "Invalid UTF8 bytes: " + value; + if (Bytes.contains(bytes, STRING_ENDING_BYTE)) { + 
E.checkArgument(false, + "Can't contains byte '0x00' in string: '%s'", + value); + } + this.write(bytes); + } + /* + * Choose 0x00 as ending symbol (see #1057) + * The following is out of date: + * A reasonable ending symbol should be 0x00(to ensure order), but + * considering that some backends like PG do not support 0x00 string, + * so choose 0xFF currently. + */ + this.write(STRING_ENDING_BYTE); + return this; + } + + public String readStringWithEnding() { + return StringEncoding.decode(this.readBytesWithEnding()); + } + + public BytesBuffer writeStringToRemaining(String value) { + byte[] bytes = StringEncoding.encode(value); + this.write(bytes); + return this; + } + + public String readStringFromRemaining() { + byte[] bytes = new byte[this.buffer.remaining()]; + this.buffer.get(bytes); + return StringEncoding.decode(bytes); + } + + public BytesBuffer writeUInt8(int val) { + assert val <= UINT8_MAX; + this.write(val); + return this; + } + + public int readUInt8() { + return this.read() & 0x000000ff; + } + + public BytesBuffer writeUInt16(int val) { + assert val <= UINT16_MAX; + this.writeShort((short) val); + return this; + } + + public int readUInt16() { + return this.readShort() & 0x0000ffff; + } + + public BytesBuffer writeUInt32(long val) { + assert val <= UINT32_MAX; + this.writeInt((int) val); + return this; + } + + public long readUInt32() { + return this.readInt() & 0xffffffffL; + } + + public BytesBuffer writeVInt(int value) { + // NOTE: negative numbers are not compressed + if (value > 0x0fffffff || value < 0) { + this.write(0x80 | ((value >>> 28) & 0x7f)); + } + if (value > 0x1fffff || value < 0) { + this.write(0x80 | ((value >>> 21) & 0x7f)); + } + if (value > 0x3fff || value < 0) { + this.write(0x80 | ((value >>> 14) & 0x7f)); + } + if (value > 0x7f || value < 0) { + this.write(0x80 | ((value >>> 7) & 0x7f)); + } + this.write(value & 0x7f); + + return this; + } + + public int readVInt() { + byte leading = this.read(); + E.checkArgument(leading != 
0x80, + "Unexpected varint with leading byte '0x%s'", + Bytes.toHex(leading)); + int value = leading & 0x7f; + if (leading >= 0) { + return value; + } + + int i = 1; + for (; i < 5; i++) { + byte b = this.read(); + if (b >= 0) { + value = b | (value << 7); + break; + } else { + value = (b & 0x7f) | (value << 7); + } + } + + E.checkArgument(i < 5, + "Unexpected varint %s with too many bytes(%s)", + value, i + 1); + E.checkArgument(i < 4 || (leading & 0x70) == 0, + "Unexpected varint %s with leading byte '0x%s'", + value, Bytes.toHex(leading)); + return value; + } + + public BytesBuffer writeVLong(long value) { + if (value < 0) { + this.write((byte) 0x81); + } + if (value > 0xffffffffffffffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 56) & 0x7f)); + } + if (value > 0x1ffffffffffffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 49) & 0x7f)); + } + if (value > 0x3ffffffffffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 42) & 0x7f)); + } + if (value > 0x7ffffffffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 35) & 0x7f)); + } + if (value > 0xfffffffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 28) & 0x7f)); + } + if (value > 0x1fffffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 21) & 0x7f)); + } + if (value > 0x3fffL || value < 0L) { + this.write(0x80 | ((int) (value >>> 14) & 0x7f)); + } + if (value > 0x7fL || value < 0L) { + this.write(0x80 | ((int) (value >>> 7) & 0x7f)); + } + this.write((int) value & 0x7f); + + return this; + } + + public long readVLong() { + byte leading = this.read(); + E.checkArgument(leading != 0x80, + "Unexpected varlong with leading byte '0x%s'", + Bytes.toHex(leading)); + long value = leading & 0x7fL; + if (leading >= 0) { + assert (leading & 0x80) == 0; + return value; + } + + int i = 1; + for (; i < 10; i++) { + byte b = this.read(); + if (b >= 0) { + value = b | (value << 7); + break; + } else { + value = (b & 0x7f) | (value << 7); + } + } + + E.checkArgument(i < 10, + 
"Unexpected varlong %s with too many bytes(%s)", + value, i + 1); + E.checkArgument(i < 9 || (leading & 0x7e) == 0, + "Unexpected varlong %s with leading byte '0x%s'", + value, Bytes.toHex(leading)); + return value; + } + + public BytesBuffer writeProperty(PropertyKey pkey, Object value) { + if (pkey.cardinality() == Cardinality.SINGLE) { + this.writeProperty(pkey.dataType(), value); + return this; + } + + assert pkey.cardinality() == Cardinality.LIST || + pkey.cardinality() == Cardinality.SET; + Collection values = (Collection) value; + this.writeVInt(values.size()); + for (Object o : values) { + this.writeProperty(pkey.dataType(), o); + } + return this; + } + + public Object readProperty(PropertyKey pkey) { + if (pkey.cardinality() == Cardinality.SINGLE) { + return this.readProperty(pkey.dataType()); + } + + assert pkey.cardinality() == Cardinality.LIST || + pkey.cardinality() == Cardinality.SET; + int size = this.readVInt(); + Collection values = pkey.newValue(); + for (int i = 0; i < size; i++) { + values.add(this.readProperty(pkey.dataType())); + } + return values; + } + + public void writeProperty(DataType dataType, Object value) { + switch (dataType) { + case BOOLEAN: + this.writeVInt(((Boolean) value) ? 
1 : 0); + break; + case BYTE: + this.writeVInt((Byte) value); + break; + case INT: + this.writeVInt((Integer) value); + break; + case FLOAT: + this.writeFloat((Float) value); + break; + case LONG: + this.writeVLong((Long) value); + break; + case DATE: + this.writeVLong(((Date) value).getTime()); + break; + case DOUBLE: + this.writeDouble((Double) value); + break; + case TEXT: + this.writeString((String) value); + break; + case UUID: + UUID uuid = (UUID) value; + // Generally writeVLong(uuid) can't save space + this.writeLong(uuid.getMostSignificantBits()); + this.writeLong(uuid.getLeastSignificantBits()); + break; + default: // ignore + } + } + + public Object readProperty(DataType dataType) { + switch (dataType) { + case BOOLEAN: + return this.readVInt() == 1; + case BYTE: + return (byte) this.readVInt(); + case INT: + return this.readVInt(); + case FLOAT: + return this.readFloat(); + case LONG: + return this.readVLong(); + case DATE: + return new Date(this.readVLong()); + case DOUBLE: + return this.readDouble(); + case TEXT: + return this.readString(); + case UUID: + return new UUID(this.readLong(), this.readLong()); + default: // ignore + } + } + + public BytesBuffer writeId(Id id) { + return this.writeId(id, false); + } + + public BytesBuffer writeId(Id id, boolean big) { + switch (id.type()) { + case EDGE: + // Edge Id + this.writeUInt8(0x7e); // 0b01111110 means EdgeId + this.writeEdgeId(id); + break; + default: + // String Id + bytes = id.asBytes(); + int len = bytes.length; + E.checkArgument(len > 0, "Can't write empty id"); + if (!big) { + E.checkArgument(len <= ID_LEN_MAX, + "Id max length is %s, but got %s {%s}", + ID_LEN_MAX, len, id); + len -= 1; // mapping [1, 128] to [0, 127] + this.writeUInt8(len | 0x80); + } else { + E.checkArgument(len <= BIG_ID_LEN_MAX, + "Big id max length is %s, but got %s {%s}", + BIG_ID_LEN_MAX, len, id); + len -= 1; + int high = len >> 8; + int low = len & 0xff; + this.writeUInt8(high | 0x80); + this.writeUInt8(low); + } + 
this.write(bytes); + break; + } + return this; + } + + public Id readId() { + return this.readId(false); + } + + public Id readId(boolean big) { + byte b = this.read(); + boolean number = (b & 0x80) == 0; + if (number) { + if (b == 0x7f) { + // UUID + return IdGenerator.of(this.read(UUID_LENGTH), IdType.UUID); + } else if (b == 0x7e) { + // Edge Id + return this.readEdgeId(); + } else { + // Number Id + return IdGenerator.of(this.readNumber(b)); + } + } else { + // String Id + int len = b & ID_LEN_MASK; + if (big) { + int high = len << 8; + int low = this.readUInt8(); + len = high + low; + } + len += 1; // restore [0, 127] to [1, 128] + byte[] id = this.read(len); + return IdGenerator.of(id, IdType.STRING); + } + } + + public BytesBuffer writeEdgeId(Id id) { + // owner-vertex + dir + edge-label + sort-values + other-vertex + EdgeId edge = (EdgeId) id; + this.writeId(edge.ownerVertexId()); + this.write(edge.directionCode()); + this.writeId(edge.edgeLabelId()); + this.writeStringWithEnding(edge.sortValues()); + this.writeId(edge.otherVertexId()); + return this; + } + + public Id readEdgeId() { + return new EdgeId(this.readId(), EdgeId.directionFromCode(this.read()), + this.readId(), this.readStringWithEnding(), + this.readId()); + } + + public BytesBuffer writeIndexId(Id id, HugeType type) { + return this.writeIndexId(id, type, true); + } + + public BytesBuffer writeIndexId(Id id, HugeType type, boolean withEnding) { + byte[] bytes = id.asBytes(); + int len = bytes.length; + E.checkArgument(len > 0, "Can't write empty id"); + + this.write(bytes); + if (type.isStringIndex()) { + if (Bytes.contains(bytes, STRING_ENDING_BYTE)) { + // Not allow STRING_ENDING_BYTE exist in string index id + E.checkArgument(false, + "The %s type index id can't contains " + + "byte '0x%s', but got: 0x%s", type, + Bytes.toHex(STRING_ENDING_BYTE), + Bytes.toHex(bytes)); + } + if (withEnding) { + this.writeStringWithEnding(""); + } + } + return this; + } + + public BinaryId 
readIndexId(HugeType type) { + byte[] id; + if (type.isRange4Index()) { + // IndexLabel 4 bytes + fieldValue 4 bytes + id = this.read(8); + } else if (type.isRange8Index()) { + // IndexLabel 4 bytes + fieldValue 8 bytes + id = this.read(12); + } else { + assert type.isStringIndex(); + id = this.readBytesWithEnding(); + } + return new BinaryId(id, IdGenerator.of(id, IdType.STRING)); + } + + public BinaryId asId() { + return new BinaryId(this.bytes(), null); + } + + public BinaryId parseId(HugeType type, boolean enablePartition) { + if (type.isIndex()) { + return this.readIndexId(type); + } + // Parse id from bytes + if ((type.isVertex() || type.isEdge()) && enablePartition) { + this.readShort(); + } + int start = this.buffer.position(); + /* + * Since edge id in edges table doesn't prefix with leading 0x7e, + * so readId() will return the source vertex id instead of edge id, + * can't call: type.isEdge() ? this.readEdgeId() : this.readId(); + */ + Id id = this.readId(); + int end = this.buffer.position(); + int len = end - start; + byte[] bytes = new byte[len]; + System.arraycopy(this.array(), start, bytes, 0, len); + return new BinaryId(bytes, id); + } + + private void writeNumber(long val) { + /* + * 8 kinds of number, 2 ~ 9 bytes number: + * 0b 0kkksxxx X... 
+ * 0(1 bit) + kind(3 bits) + signed(1 bit) + number(n bits) + * + * 2 byte : 0b 0000 1xxx X(8 bits) [0, 2047] + * 0b 0000 0xxx X(8 bits) [-2048, -1] + * 3 bytes: 0b 0001 1xxx X X [0, 524287] + * 0b 0001 0xxx X X [-524288, -1] + * 4 bytes: 0b 0010 1xxx X X X [0, 134217727] + * 0b 0010 0xxx X X X [-134217728, -1] + * 5 bytes: 0b 0011 1xxx X X X X [0, 2^35 - 1] + * 0b 0011 0xxx X X X X [-2^35, -1] + * 6 bytes: 0b 0100 1xxx X X X X X [0, 2^43 - 1] + * 0b 0100 0xxx X X X X X [-2^43, -1] + * 7 bytes: 0b 0101 1xxx X X X X X X [0, 2^51 - 1] + * 0b 0101 0xxx X X X X X X [-2^51, -1] + * 8 bytes: 0b 0110 1xxx X X X X X X X [0, 2^59 - 1] + * 0b 0110 0xxx X X X X X X X [-2^59, -1] + * 9 bytes: 0b 0111 1000 X X X X X X X X [0, 2^64 - 1] + * 0b 0111 0000 X X X X X X X X [-2^64, -1] + * + * NOTE: 0b 0111 1111 is used by 128 bits UUID + * 0b 0111 1110 is used by EdgeId + */ + int positive = val >= 0 ? 0x08 : 0x00; + if (~0x7ffL <= val && val <= 0x7ffL) { + int high3bits = (int) (val >> 8) & 0x07; + this.writeUInt8(0x00 | positive | high3bits); + this.writeUInt8((byte) val); + } else if (~0x7ffffL <= val && val <= 0x7ffffL) { + int high3bits = (int) (val >> 16) & 0x07; + this.writeUInt8(0x10 | positive | high3bits); + this.writeShort((short) val); + } else if (~0x7ffffffL <= val && val <= 0x7ffffffL) { + int high3bits = (int) (val >> 24 & 0x07); + this.writeUInt8(0x20 | positive | high3bits); + this.write((byte) (val >> 16)); + this.writeShort((short) val); + } else if (~0x7ffffffffL <= val && val <= 0x7ffffffffL) { + int high3bits = (int) (val >> 32) & 0x07; + this.writeUInt8(0x30 | positive | high3bits); + this.writeInt((int) val); + } else if (~0x7ffffffffffL <= val && val <= 0x7ffffffffffL) { + int high3bits = (int) (val >> 40) & 0x07; + this.writeUInt8(0x40 | positive | high3bits); + this.write((byte) (val >> 32)); + this.writeInt((int) val); + } else if (~0x7ffffffffffffL <= val && val <= 0x7ffffffffffffL) { + int high3bits = (int) (val >> 48) & 0x07; + this.writeUInt8(0x50 | 
positive | high3bits); + this.writeShort((short) (val >> 32)); + this.writeInt((int) val); + } else if (~0x7ffffffffffffffL <= val && val <= 0x7ffffffffffffffL) { + int high3bits = (int) (val >> 56) & 0x07; + this.writeUInt8(0x60 | positive | high3bits); + this.write((byte) (val >> 48)); + this.writeShort((short) (val >> 32)); + this.writeInt((int) val); + } else { + // high3bits is always 0b000 for 9 bytes number + this.writeUInt8(0x70 | positive); + this.writeLong(val); + } + } + + private long readNumber(byte b) { + E.checkArgument((b & 0x80) == 0, + "Not a number type with prefix byte '0x%s'", + Bytes.toHex(b)); + // Parse the kind from byte 0kkksxxx + int kind = b >>> 4; + boolean positive = (b & 0x08) > 0; + long high3bits = b & 0x07; + long value = high3bits << ((kind + 1) * 8); + switch (kind) { + case 0: + value |= this.readUInt8(); + break; + case 1: + value |= this.readUInt16(); + break; + case 2: + value |= this.readUInt8() << 16 | this.readUInt16(); + break; + case 3: + value |= this.readUInt32(); + break; + case 4: + value |= (long) this.readUInt8() << 32 | this.readUInt32(); + break; + case 5: + value |= (long) this.readUInt16() << 32 | this.readUInt32(); + break; + case 6: + value |= (long) this.readUInt8() << 48 | + (long) this.readUInt16() << 32 | + this.readUInt32(); + break; + case 7: + assert high3bits == 0L; + value |= this.readLong(); + break; + default: + throw new AssertionError("Invalid length of number: " + kind); + } + if (!positive && kind < 7) { + // Restore the bits of the original negative number + long mask = Long.MIN_VALUE >> (52 - kind * 8); + value |= mask; + } + return value; + } + + private byte[] readBytesWithEnding() { + int start = this.buffer.position(); + boolean foundEnding = false; + int remaining = this.remaining(); + for (int i = 0; i < remaining; i++) { + byte current = this.read(); + if (current == STRING_ENDING_BYTE) { + foundEnding = true; + break; + } + } + E.checkArgument(foundEnding, "Not found ending '0x%s'", + 
Bytes.toHex(STRING_ENDING_BYTE)); + int end = this.buffer.position() - 1; + int len = end - start; + if (len <= 0) { + return BYTES_EMPTY; + } + byte[] bytes = new byte[len]; + System.arraycopy(this.array(), start, bytes, 0, len); + return bytes; + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java new file mode 100644 index 000000000..7e3d87691 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java @@ -0,0 +1,332 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.serializer.direct.util; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import com.baidu.hugegraph.loader.constant.Constants; +import com.baidu.hugegraph.loader.source.AbstractSource; +import com.baidu.hugegraph.loader.source.InputSource; +import com.baidu.hugegraph.loader.source.file.FileSource; +import com.baidu.hugegraph.loader.source.file.ListFormat; +import com.baidu.hugegraph.structure.constant.Cardinality; +import com.baidu.hugegraph.structure.constant.DataType; +import com.baidu.hugegraph.structure.schema.PropertyKey; +import com.baidu.hugegraph.util.DateUtil; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.InsertionOrderUtil; +import com.baidu.hugegraph.util.ReflectionUtil; +import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableSet; + +public final class DataTypeUtil { + + private static final Set ACCEPTABLE_TRUE = ImmutableSet.of( + "true", "1", "yes", "y" + ); + private static final Set ACCEPTABLE_FALSE = ImmutableSet.of( + "false", "0", "no", "n" + ); + + public static boolean isSimpleValue(Object value) { + if (value == null) { + return false; + } + return ReflectionUtil.isSimpleType(value.getClass()); + } + + public static Object convert(Object value, PropertyKey propertyKey, InputSource source) { + E.checkArgumentNotNull(value, + "The value to be converted can't be null"); + + String key = propertyKey.name(); + DataType dataType = propertyKey.dataType(); + Cardinality cardinality = propertyKey.cardinality(); + switch (cardinality) { + case SINGLE: + return parseSingleValue(key, value, dataType, source); + case SET: + case LIST: + return parseMultiValues(key, value, dataType, cardinality, source); + default: + throw new AssertionError(String.format( + "Unsupported cardinality: '%s'", cardinality)); + } + } + + @SuppressWarnings("unchecked") + public static 
List splitField(String key, Object rawColumnValue, InputSource source) { + E.checkArgument(rawColumnValue != null, + "The value to be split can't be null"); + if (rawColumnValue instanceof Collection) { + return (List) rawColumnValue; + } + // TODO: Seems a bit violent + String rawValue = rawColumnValue.toString(); + return split(key, rawValue, source); + } + + public static long parseNumber(String key, Object rawValue) { + if (rawValue instanceof Number) { + return ((Number) rawValue).longValue(); + } else if (rawValue instanceof String) { + // trim() is a little time-consuming + return parseLong(((String) rawValue).trim()); + } + throw new IllegalArgumentException(String.format( + "The value(key='%s') must can be casted to Long, " + + "but got '%s'(%s)", key, rawValue, rawValue.getClass().getName())); + } + + public static UUID parseUUID(String key, Object rawValue) { + if (rawValue instanceof UUID) { + return (UUID) rawValue; + } else if (rawValue instanceof String) { + String value = ((String) rawValue).trim(); + if (value.contains("-")) { + return UUID.fromString(value); + } + // UUID represented by hex string + E.checkArgument(value.length() == 32, "Invalid UUID value(key='%s') '%s'", key, value); + String high = value.substring(0, 16); + String low = value.substring(16); + return new UUID(Long.parseUnsignedLong(high, 16), Long.parseUnsignedLong(low, 16)); + } + throw new IllegalArgumentException(String.format( + "Failed to convert value(key='%s') '%s'(%s) to UUID", + key, rawValue, rawValue.getClass())); + } + + private static Object parseSingleValue(String key, Object rawValue, + DataType dataType, + InputSource source) { + // Trim space if raw value is string + Object value = rawValue; + if (rawValue instanceof String) { + value = ((String) rawValue).trim(); + } + if (dataType.isNumber()) { + return parseNumber(key, value, dataType); + } else if (dataType.isBoolean()) { + return parseBoolean(key, value); + } else if (dataType.isDate()) { + 
+            E.checkState(source instanceof FileSource,
+                         "Only accept FileSource when convert String value " +
+                         "to Date, but got '%s'", source.getClass().getName());
+            String dateFormat = ((FileSource) source).dateFormat();
+            String timeZone = ((FileSource) source).timeZone();
+            return parseDate(key, value, dateFormat, timeZone);
+        } else if (dataType.isUUID()) {
+            return parseUUID(key, value);
+        }
+        E.checkArgument(checkDataType(key, value, dataType),
+                        "The value(key='%s') '%s'(%s) is not match with " +
+                        "data type %s and can't convert to it",
+                        key, value, value.getClass(), dataType);
+        return value;
+    }
+
+    /**
+     * collection format: "obj1,obj2,...,objn" or "[obj1,obj2,...,objn]" ..etc
+     * TODO: After parsing to json, the order of the collection changed
+     * in some cases (such as list)
+     **/
+    private static Object parseMultiValues(String key, Object values,
+                                           DataType dataType,
+                                           Cardinality cardinality,
+                                           InputSource source) {
+        // JSON file should not parse again
+        if (values instanceof Collection &&
+            checkCollectionDataType(key, (Collection) values, dataType)) {
+            return values;
+        }
+
+        // NOTE(fix): the format string has 3 placeholders (key, value,
+        // value class) but only 2 args were supplied, so a failing check
+        // would raise MissingFormatArgumentException instead of the
+        // intended error message; pass the value's class as the 3rd arg.
+        E.checkState(values instanceof String,
+                     "The value(key='%s') must be String type, " +
+                     "but got '%s'(%s)", key, values, values.getClass());
+        String rawValue = (String) values;
+        List valueColl = split(key, rawValue, source);
+        Collection results = cardinality == Cardinality.LIST ?
+                              InsertionOrderUtil.newList() :
+                              InsertionOrderUtil.newSet();
+        valueColl.forEach(value -> {
+            results.add(parseSingleValue(key, value, dataType, source));
+        });
+        E.checkArgument(checkCollectionDataType(key, results, dataType),
+                        "Not all collection elems %s match with data type %s",
+                        results, dataType);
+        return results;
+    }
+
+    private static Boolean parseBoolean(String key, Object rawValue) {
+        if (rawValue instanceof Boolean) {
+            return (Boolean) rawValue;
+        }
+        if (rawValue instanceof String) {
+            String value = ((String) rawValue).toLowerCase();
+            if (ACCEPTABLE_TRUE.contains(value)) {
+                return true;
+            } else if (ACCEPTABLE_FALSE.contains(value)) {
+                return false;
+            } else {
+                // NOTE(fix): format is "'%s'(key='%s')" -> value first,
+                // then key; the original args were swapped (key, rawValue)
+                // which produced a misleading error message.
+                throw new IllegalArgumentException(String.format(
+                          "Failed to convert '%s'(key='%s') to Boolean, " +
+                          "the acceptable boolean strings are %s or %s",
+                          rawValue, key, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE));
+            }
+        }
+        throw new IllegalArgumentException(String.format(
+                  "Failed to convert value(key='%s') '%s'(%s) to Boolean",
+                  key, rawValue, rawValue.getClass()));
+    }
+
+    private static Number parseNumber(String key, Object value,
+                                      DataType dataType) {
+        E.checkState(dataType.isNumber(),
+                     "The target data type must be number");
+
+        if (dataType.clazz().isInstance(value)) {
+            return (Number) value;
+        }
+        try {
+            switch (dataType) {
+                case BYTE:
+                    return Byte.valueOf(value.toString());
+                case INT:
+                    return Integer.valueOf(value.toString());
+                case LONG:
+                    return parseLong(value.toString());
+                case FLOAT:
+                    return Float.valueOf(value.toString());
+                case DOUBLE:
+                    return Double.valueOf(value.toString());
+                default:
+                    throw new AssertionError(String.format(
+                              "Number type only contains Byte, Integer, " +
+                              "Long, Float, Double, but got %s",
+                              dataType.clazz()));
+            }
+        } catch (NumberFormatException e) {
+            throw new IllegalArgumentException(String.format(
+                      "Failed to convert value(key=%s) '%s'(%s) to Number",
+                      key, value, value.getClass()), e);
+        }
+    }
+
+    private static long parseLong(String rawValue) {
+        if (rawValue.startsWith("-")) {
+            return Long.parseLong(rawValue);
+        } else {
+            return Long.parseUnsignedLong(rawValue);
+        }
+    }
+
+    private static Date parseDate(String key, Object value,
+                                  String dateFormat, String timeZone) {
+        if (value instanceof Date) {
+            return (Date) value;
+        }
+        if (value instanceof Number) {
+            return new Date(((Number) value).longValue());
+        } else if (value instanceof String) {
+            if (Constants.TIMESTAMP.equals(dateFormat)) {
+                try {
+                    long timestamp = Long.parseLong((String) value);
+                    return new Date(timestamp);
+                } catch (NumberFormatException e) {
+                    // NOTE(fix): chain the NumberFormatException as the
+                    // cause instead of silently dropping it (was lost).
+                    throw new IllegalArgumentException(String.format(
+                              "Invalid timestamp value '%s'", value), e);
+                }
+            } else {
+                return DateUtil.parse((String) value, dateFormat, timeZone);
+            }
+        }
+        throw new IllegalArgumentException(String.format(
+                  "Failed to convert value(key='%s') '%s'(%s) to Date",
+                  key, value, value.getClass()));
+    }
+
+    private static List split(String key, String rawValue,
+                              InputSource source) {
+        List valueColl = new ArrayList<>();
+        if (rawValue.isEmpty()) {
+            return valueColl;
+        }
+        E.checkState(AbstractSource.class.isAssignableFrom(source.getClass()),
+                     "Only accept AbstractSource when parse multi values, " +
+                     "but got '%s'", source.getClass().getName());
+        ListFormat listFormat = ((AbstractSource) source).listFormat();
+        E.checkArgumentNotNull(listFormat, "The list_format must be set when " +
+                                           "parse list or set values");
+
+        String startSymbol = listFormat.startSymbol();
+        String endSymbol = listFormat.endSymbol();
+        E.checkArgument(rawValue.length() >=
+                        startSymbol.length() + endSymbol.length(),
+                        "The value(key='%s') '%s' length(%s) must be >= " +
+                        "start symbol '%s' + end symbol '%s' length",
+                        key, rawValue, rawValue.length(),
+                        startSymbol, endSymbol);
+        E.checkArgument(rawValue.startsWith(startSymbol) &&
+                        rawValue.endsWith(endSymbol),
+                        "The value(key='%s') must start with '%s' and " +
+                        "end with '%s', but got '%s'",
+                        key,
startSymbol, endSymbol, rawValue); + rawValue = rawValue.substring(startSymbol.length(), + rawValue.length() - endSymbol.length()); + String elemDelimiter = listFormat.elemDelimiter(); + Splitter.on(elemDelimiter).split(rawValue).forEach(value -> { + if (!listFormat.ignoredElems().contains(value)) { + valueColl.add(value); + } + }); + return valueColl; + } + + /** + * Check type of the value valid + */ + private static boolean checkDataType(String key, Object value, + DataType dataType) { + if (value instanceof Number) { + return parseNumber(key, value, dataType) != null; + } + return dataType.clazz().isInstance(value); + } + + /** + * Check type of all the values(may be some of list properties) valid + */ + private static boolean checkCollectionDataType(String key, + Collection values, + DataType dataType) { + for (Object value : values) { + if (!checkDataType(key, value, dataType)) { + return false; + } + } + return true; + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java new file mode 100644 index 000000000..ff8e84d2f --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java @@ -0,0 +1,242 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.serializer.direct.util; + +import com.baidu.hugegraph.exception.NotFoundException; +import com.baidu.hugegraph.serializer.direct.struct.HugeType; +import com.baidu.hugegraph.structure.constant.Direction; +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.IdUtil; + +/** + * Class used to format and parse id of edge, the edge id consists of: + * { source-vertex-id + edge-label + edge-name + target-vertex-id } + * NOTE: if we use `entry.type()` which is IN or OUT as a part of id, + * an edge's id will be different due to different directions (belongs + * to 2 owner vertex) + */ +public class EdgeId implements Id { + + private final Id ownerVertexId; + private final Direction direction; + private final Id edgeLabelId; + private final String sortValues; + private final Id otherVertexId; + + private final boolean directed; + private String cache; + + public EdgeId(Id ownerVertexId, Direction direction, Id edgeLabelId, + String sortValues, Id otherVertexId) { + this(ownerVertexId, direction, edgeLabelId, + sortValues, otherVertexId, false); + } + + public EdgeId(Id ownerVertexId, Direction direction, Id edgeLabelId, + String sortValues, Id otherVertexId, boolean directed) { + this.ownerVertexId = ownerVertexId; + this.direction = direction; + this.edgeLabelId = edgeLabelId; + this.sortValues = sortValues; + this.otherVertexId = otherVertexId; + this.directed = directed; + this.cache = null; + } + + private Id sourceVertexId() { + return this.direction == Direction.OUT ? 
+ this.ownerVertexId : + this.otherVertexId; + } + + private Id targetVertexId() { + return this.direction == Direction.OUT ? + this.otherVertexId : + this.ownerVertexId; + } + + public Id ownerVertexId() { + return this.ownerVertexId; + } + + public Id edgeLabelId() { + return this.edgeLabelId; + } + + public Direction direction() { + return this.direction; + } + + public byte directionCode() { + return directionToCode(this.direction); + } + + public String sortValues() { + return this.sortValues; + } + + public Id otherVertexId() { + return this.otherVertexId; + } + + public String asString() { + if (this.cache != null) { + return this.cache; + } + if (this.directed) { + this.cache = SplicingIdGenerator.concat( + IdUtil.writeString(this.ownerVertexId), + this.direction.type().string(), + IdUtil.writeLong(this.edgeLabelId), + this.sortValues, + IdUtil.writeString(this.otherVertexId)); + } else { + this.cache = SplicingIdGenerator.concat( + IdUtil.writeString(this.sourceVertexId()), + IdUtil.writeLong(this.edgeLabelId), + this.sortValues, + IdUtil.writeString(this.targetVertexId())); + } + return this.cache; + } + + @Override + public byte[] asBytes() { + return StringEncoding.encode(this.asString()); + } + + @Override + public int length() { + return this.asString().length(); + } + + @Override + public IdType type() { + return IdType.EDGE; + } + + @Override + public int hashCode() { + if (this.directed) { + return this.ownerVertexId.hashCode() ^ + this.direction.hashCode() ^ + this.edgeLabelId.hashCode() ^ + this.sortValues.hashCode() ^ + this.otherVertexId.hashCode(); + } else { + return this.sourceVertexId().hashCode() ^ + this.edgeLabelId.hashCode() ^ + this.sortValues.hashCode() ^ + this.targetVertexId().hashCode(); + } + } + + @Override + public boolean equals(Object object) { + if (!(object instanceof EdgeId)) { + return false; + } + EdgeId other = (EdgeId) object; + if (this.directed) { + return this.ownerVertexId.equals(other.ownerVertexId) && + 
this.direction == other.direction && + this.edgeLabelId.equals(other.edgeLabelId) && + this.sortValues.equals(other.sortValues) && + this.otherVertexId.equals(other.otherVertexId); + } else { + return this.sourceVertexId().equals(other.sourceVertexId()) && + this.edgeLabelId.equals(other.edgeLabelId) && + this.sortValues.equals(other.sortValues) && + this.targetVertexId().equals(other.targetVertexId()); + } + } + + @Override + public String toString() { + return this.asString(); + } + + public static byte directionToCode(Direction direction) { + return direction.type().code(); + } + + public static EdgeId parse(String id) { + return parse(id, false); + } + + public static EdgeId parse(String id, boolean returnNullIfError) { + String[] idParts = SplicingIdGenerator.split(id); + if (!(idParts.length == 4 || idParts.length == 5)) { + if (returnNullIfError) { + return null; + } + throw new HugeException("EdgeId must format as 4~5 parts, but got %s parts: '%s'", + idParts.length, id); + } + try { + if (idParts.length == 4) { + Id ownerVertexId = IdUtil.readString(idParts[0]); + Id edgeLabelId = IdUtil.readLong(idParts[1]); + String sortValues = idParts[2]; + Id otherVertexId = IdUtil.readString(idParts[3]); + return new EdgeId(ownerVertexId, Direction.OUT, edgeLabelId, + sortValues, otherVertexId); + } else { + assert idParts.length == 5; + Id ownerVertexId = IdUtil.readString(idParts[0]); + HugeType direction = HugeType.fromString(idParts[1]); + Id edgeLabelId = IdUtil.readLong(idParts[2]); + String sortValues = idParts[3]; + Id otherVertexId = IdUtil.readString(idParts[4]); + return new EdgeId(ownerVertexId, Direction.convert(direction), + edgeLabelId, sortValues, otherVertexId); + } + } catch (Throwable e) { + if (returnNullIfError) { + return null; + } + throw new HugeException("Invalid format of edge id '%s'", e, id); + } + } + + public static Id parseStoredString(String id) { + String[] idParts = split(id); + E.checkArgument(idParts.length == 4, "Invalid id 
format: %s", id); + Id ownerVertexId = IdUtil.readStoredString(idParts[0]); + Id edgeLabelId = IdGenerator.ofStoredString(idParts[1], IdType.LONG); + String sortValues = idParts[2]; + Id otherVertexId = IdUtil.readStoredString(idParts[3]); + return new EdgeId(ownerVertexId, Direction.OUT, edgeLabelId, + sortValues, otherVertexId); + } + + public static String asStoredString(Id id) { + EdgeId eid = (EdgeId) id; + return SplicingIdGenerator.concat( + IdUtil.writeStoredString(eid.sourceVertexId()), + IdGenerator.asStoredString(eid.edgeLabelId()), + eid.sortValues(), + IdUtil.writeStoredString(eid.targetVertexId())); + } + + public static String[] split(String id) { + return SplicingIdGenerator.split(id); + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/GraphSchema.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/GraphSchema.java new file mode 100644 index 000000000..af432f271 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/GraphSchema.java @@ -0,0 +1,105 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.serializer.direct.util; + +import java.util.HashMap; +import java.util.Map; + +import com.baidu.hugegraph.driver.HugeClient; +import com.baidu.hugegraph.exception.ServerException; +import com.baidu.hugegraph.structure.schema.EdgeLabel; +import com.baidu.hugegraph.structure.schema.PropertyKey; +import com.baidu.hugegraph.structure.schema.VertexLabel; + +/** + * We could get all graph schema from server and cache/update it in client(subset of SchemaManager) + */ +public class GraphSchema { + + private final HugeClient client; + private final Map propertyKeys; + private final Map vertexLabels; + private final Map edgeLabels; + + public GraphSchema(HugeClient client) { + this.client = client; + this.propertyKeys = new HashMap<>(); + this.vertexLabels = new HashMap<>(); + this.edgeLabels = new HashMap<>(); + // init all schema + this.updateAll(); + } + + public void updateAll() { + this.propertyKeys.clear(); + this.vertexLabels.clear(); + this.edgeLabels.clear(); + + client.schema().getPropertyKeys().forEach(pk -> this.propertyKeys.put(pk.name(), pk)); + client.schema().getVertexLabels().forEach(vl -> this.vertexLabels.put(vl.name(), vl)); + client.schema().getEdgeLabels().forEach(el -> this.edgeLabels.put(el.name(), el)); + } + + public PropertyKey getPropertyKey(String name) { + PropertyKey propertyKey = this.propertyKeys.get(name); + if (propertyKey == null) { + try { + propertyKey = this.client.schema().getPropertyKey(name); + } catch (ServerException e) { + throw new HugeException("The property key '%s' doesn't exist", name); + } + } + return propertyKey; + } + + public VertexLabel getVertexLabel(String name) { + VertexLabel vertexLabel = this.vertexLabels.get(name); + if (vertexLabel == null) { + try { + vertexLabel = this.client.schema().getVertexLabel(name); + } catch (ServerException e) { + throw new HugeException("The vertex label '%s' doesn't exist", name); + } + } + return vertexLabel; + } + + public EdgeLabel 
getEdgeLabel(String name) { + EdgeLabel edgeLabel = this.edgeLabels.get(name); + if (edgeLabel == null) { + try { + edgeLabel = this.client.schema().getEdgeLabel(name); + } catch (ServerException e) { + throw new HugeException("The edge label '%s' doesn't exist", name); + } + } + return edgeLabel; + } + + public EdgeLabel getEdgeLabel(int id) { + for (EdgeLabel label : edgeLabels.values()) { + if (label.id() == id) { + return label; + } + } + + throw new HugeException("The edge label id '%s' doesn't exist", id); + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/HugeException.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/HugeException.java new file mode 100644 index 000000000..143e5ae27 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/HugeException.java @@ -0,0 +1,56 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.baidu.hugegraph.serializer.direct.util; + +/** + * Used for fix Exception problems, simplify from server + */ +public class HugeException extends RuntimeException { + + private static final long serialVersionUID = -8711375282196157051L; + + public HugeException(String message) { + super(message); + } + + public HugeException(String message, Throwable cause) { + super(message, cause); + } + + public HugeException(String message, Object... args) { + super(String.format(message, args)); + } + + public HugeException(String message, Throwable cause, Object... args) { + super(String.format(message, args), cause); + } + + public Throwable rootCause() { + return rootCause(this); + } + + public static Throwable rootCause(Throwable e) { + Throwable cause = e; + while (cause.getCause() != null) { + cause = cause.getCause(); + } + return cause; + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java new file mode 100644 index 000000000..3cc7429f1 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java @@ -0,0 +1,70 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.baidu.hugegraph.serializer.direct.util; + +import com.baidu.hugegraph.util.E; + +public interface Id { + + int UUID_LENGTH = 16; + + int length(); + + byte[] asBytes(); + + IdType type(); + + default boolean edge() { + return this.type() == IdType.EDGE; + } + + enum IdType { + + UNKNOWN, + LONG, + UUID, + STRING, + EDGE; + + public char prefix() { + if (this == UNKNOWN) { + return 'N'; + } + return this.name().charAt(0); + } + + public static IdType valueOfPrefix(String id) { + E.checkArgument(id != null && id.length() > 0, + "Invalid id '%s'", id); + switch (id.charAt(0)) { + case 'L': + return IdType.LONG; + case 'U': + return IdType.UUID; + case 'S': + return IdType.STRING; + case 'E': + return IdType.EDGE; + default: + return IdType.UNKNOWN; + } + } + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java new file mode 100644 index 000000000..53001cdaa --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java @@ -0,0 +1,134 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.baidu.hugegraph.serializer.direct.util;
+
+import java.util.Arrays;
+import java.util.List;
+
+// NOTE(fix): the original hunk imported both
+// com.baidu.hugegraph.serializer.direct.struct.HugeVertex and
+// com.baidu.hugegraph.structure.HugeVertex — two single-type imports with
+// the same simple name are a compile error, and this change set does not
+// add any struct/HugeVertex class. Keep only the structure one.
+import com.baidu.hugegraph.structure.HugeVertex;
+import com.baidu.hugegraph.util.IdUtil;
+
+/**
+ * This class is used for merge / parse id in primaryKey mode, simplify from server
+ **/
+public class SplicingIdGenerator {
+
+    private static volatile SplicingIdGenerator instance;
+
+    // Lazy, thread-safe singleton via double-checked locking on the
+    // volatile instance field.
+    public static SplicingIdGenerator instance() {
+        if (instance == null) {
+            synchronized (SplicingIdGenerator.class) {
+                if (instance == null) {
+                    instance = new SplicingIdGenerator();
+                }
+            }
+        }
+        return instance;
+    }
+
+    /*
+     * The following defines can't be java regex special characters: "\^$.|?*+()[{"
+     */
+    private static final char ESCAPE = '`';
+    private static final char IDS_SPLITOR = '>';
+    private static final char ID_SPLITOR = ':';
+    private static final char NAME_SPLITOR = '!';
+
+    public static final String ESCAPE_STR = String.valueOf(ESCAPE);
+    public static final String IDS_SPLITOR_STR = String.valueOf(IDS_SPLITOR);
+    public static final String ID_SPLITOR_STR = String.valueOf(ID_SPLITOR);
+
+    /**
+     * Generate a string id of HugeVertex from Vertex name
+     */
+    public Id generate(HugeVertex vertex) {
+        /*
+         * Hash for row-key which will be evenly distributed.
+         * We can also use LongEncoding.encode() to encode the int/long hash
+         * if needed.
+ * id = String.format("%s%s%s", HashUtil.hash(id), ID_SPLITOR, id); + */ + // TODO: use binary Id with binary fields instead of string id + return splicing(vertex.schemaLabel().id().asString(), vertex.name()); + } + + /** + * Concat multiple ids into one composite id with IDS_SPLITOR + * @param ids the string id values to be contacted + * @return contacted string value + */ + public static String concat(String... ids) { + // NOTE: must support string id when using this method + return IdUtil.escape(IDS_SPLITOR, ESCAPE, ids); + } + + /** + * Split a composite id into multiple ids with IDS_SPLITOR + * @param ids the string id value to be split + * @return split string values + */ + public static String[] split(String ids) { + return IdUtil.unescape(ids, IDS_SPLITOR_STR, ESCAPE_STR); + } + + /** + * Concat property values with NAME_SPLITOR + * @param values the property values to be concatted + * @return concatted string value + */ + public static String concatValues(List values) { + // Convert the object list to string array + int valuesSize = values.size(); + String[] parts = new String[valuesSize]; + for (int i = 0; i < valuesSize; i++) { + parts[i] = values.get(i).toString(); + } + return IdUtil.escape(NAME_SPLITOR, ESCAPE, parts); + } + + /** + * Concat property values with NAME_SPLITOR + * @param values the property values to be contacted + * @return contacted string value + */ + public static String concatValues(Object... values) { + return concatValues(Arrays.asList(values)); + } + + /** + * Concat multiple parts into a single id with ID_SPLITOR + * @param parts the string id values to be spliced + * @return spliced id object + */ + public static Id splicing(String... 
parts) { + String escaped = IdUtil.escape(ID_SPLITOR, ESCAPE, parts); + return IdGenerator.of(escaped); + } + + /** + * Parse a single id into multiple parts with ID_SPLITOR + * @param id the id object to be parsed + * @return parsed string id parts + */ + public static String[] parse(Id id) { + return IdUtil.unescape(id.asString(), ID_SPLITOR_STR, ESCAPE_STR); + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java new file mode 100644 index 000000000..efe3340de --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java @@ -0,0 +1,166 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +// Copyright 2017 JanusGraph Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.baidu.hugegraph.serializer.direct.util; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Base64; +import java.util.UUID; + +import com.baidu.hugegraph.util.Bytes; +import com.baidu.hugegraph.util.E; +import com.google.common.base.CharMatcher; + +/** + * Used for encode / decode string values, simplify from server + */ +public final class StringEncoding { + + private static final MessageDigest DIGEST; + private static final byte[] BYTES_EMPTY = new byte[0]; + private static final String STRING_EMPTY = ""; + private static final int BLOCK_SIZE = 4096; + + static { + final String ALG = "SHA-256"; + try { + DIGEST = MessageDigest.getInstance(ALG); + } catch (NoSuchAlgorithmException e) { + throw new HugeException("Failed to load algorithm %s", e, ALG); + } + } + + private static final Base64.Encoder BASE64_ENCODER = Base64.getEncoder(); + private static final Base64.Decoder BASE64_DECODER = Base64.getDecoder(); + + // Similar to {@link StringSerializer} + public static int writeAsciiString(byte[] array, int offset, String value) { + E.checkArgument(CharMatcher.ascii().matchesAllOf(value), + "'%s' must be ASCII string", value); + int len = value.length(); + if (len == 0) { + array[offset++] = (byte) 0x80; + return offset; + } + + int i = 0; + do { + int c = value.charAt(i); + assert c <= 127; + byte b = (byte) c; + if (++i == len) { + b |= 0x80; // End marker + } + array[offset++] = b; + } while (i < len); + + return offset; + 
} + + public static String readAsciiString(byte[] array, int offset) { + StringBuilder sb = new StringBuilder(); + int c = 0; + do { + c = 0xFF & array[offset++]; + if (c != 0x80) { + sb.append((char) (c & 0x7F)); + } + } while ((c & 0x80) <= 0); + return sb.toString(); + } + + public static int getAsciiByteLength(String value) { + E.checkArgument(CharMatcher.ascii().matchesAllOf(value), + "'%s' must be ASCII string", value); + return value.isEmpty() ? 1 : value.length(); + } + + public static byte[] encode(String value) { + return value.getBytes(UTF_8); + } + + public static String decode(byte[] bytes) { + if (bytes.length == 0) { + return STRING_EMPTY; + } + return new String(bytes, UTF_8); + } + + public static String decode(byte[] bytes, int offset, int length) { + if (length == 0) { + return STRING_EMPTY; + } + return new String(bytes, offset, length, UTF_8); + } + + public static String encodeBase64(byte[] bytes) { + return BASE64_ENCODER.encodeToString(bytes); + } + + public static byte[] decodeBase64(String value) { + if (value.isEmpty()) { + return BYTES_EMPTY; + } + return BASE64_DECODER.decode(value); + } + + public static String sha256(String string) { + byte[] stringBytes = encode(string); + DIGEST.reset(); + return StringEncoding.encodeBase64(DIGEST.digest(stringBytes)); + } + + public static String format(byte[] bytes) { + return String.format("%s[0x%s]", decode(bytes), Bytes.toHex(bytes)); + } + + public static UUID uuid(String value) { + E.checkArgument(value != null, "The UUID can't be null"); + try { + if (value.contains("-") && value.length() == 36) { + return UUID.fromString(value); + } + // UUID represented by hex string + E.checkArgument(value.length() == 32, + "Invalid UUID string: %s", value); + String high = value.substring(0, 16); + String low = value.substring(16); + return new UUID(Long.parseUnsignedLong(high, 16), + Long.parseUnsignedLong(low, 16)); + } catch (NumberFormatException ignored) { + throw new 
IllegalArgumentException("Invalid UUID string: " + value); + } + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/GraphElement.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/GraphElement.java index 042882c2b..c4363b887 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/GraphElement.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/GraphElement.java @@ -93,4 +93,8 @@ public Map properties() { protected abstract GraphElement setProperty(String key, Object value); public abstract GraphElement removeProperty(String key); + + public int sizeOfProperties() { + return properties.size(); + } } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/Direction.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/Direction.java index 7d93c211a..676bd8785 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/Direction.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/Direction.java @@ -27,8 +27,8 @@ public enum Direction { BOTH(3, "both"); - private byte code = 0; - private String name = null; + private byte code; + private String name; Direction(int code, String name) { assert code < 256; diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/HugeType.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/HugeType.java index cdfb1818d..fe9de078b 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/HugeType.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/constant/HugeType.java @@ -65,8 +65,8 @@ public enum HugeType { // Metrics METRICS(240, "metrics"); - private int code; - private String name = null; + private final int code; + private final String name; HugeType(int code, String name) { assert code < 256; diff --git 
a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Edge.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Edge.java index 6d6c0dfa0..baddda378 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Edge.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Edge.java @@ -56,6 +56,7 @@ public Edge(@JsonProperty("label") String label) { this.name = null; } + @Override public String id() { return this.id; } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Vertex.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Vertex.java index 0322410f6..f0ce717f0 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Vertex.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/structure/graph/Vertex.java @@ -38,6 +38,7 @@ public Vertex(@JsonProperty("label") String label) { this.type = "vertex"; } + @Override public Object id() { return this.id; } diff --git a/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/builder/EdgeBuilder.java b/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/builder/EdgeBuilder.java index 899a6ea02..864e4f746 100644 --- a/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/builder/EdgeBuilder.java +++ b/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/builder/EdgeBuilder.java @@ -45,7 +45,7 @@ public class EdgeBuilder extends ElementBuilder { private final VertexLabel sourceLabel; private final VertexLabel targetLabel; private final Collection nonNullKeys; - // Used to optimize access performace + // Used to optimize access performance private VertexIdsIndex vertexIdsIndex; private String[] lastNames; @@ -57,7 +57,7 @@ public EdgeBuilder(LoadContext context, InputStruct struct, this.sourceLabel = this.getVertexLabel(this.edgeLabel.sourceLabel()); this.targetLabel = this.getVertexLabel(this.edgeLabel.targetLabel()); this.nonNullKeys = 
this.nonNullableKeys(this.edgeLabel); - // Ensure that the source/target id fileds are matched with id strategy + // Ensure that the source/target id fields are matched with id strategy this.checkIdFields(this.sourceLabel, this.mapping.sourceFields()); this.checkIdFields(this.targetLabel, this.mapping.targetFields()); diff --git a/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/flink/HugeGraphOutputFormat.java b/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/flink/HugeGraphOutputFormat.java index 95701549f..be4934f61 100644 --- a/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/flink/HugeGraphOutputFormat.java +++ b/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/flink/HugeGraphOutputFormat.java @@ -19,7 +19,6 @@ package com.baidu.hugegraph.loader.flink; -import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -128,7 +127,7 @@ private synchronized void flushAll() { } @Override - public synchronized void writeRecord(T row) throws IOException { + public synchronized void writeRecord(T row) { for (Map.Entry> builder : this.builders.entrySet()) { ElementMapping elementMapping = builder.getKey().mapping(); @@ -163,7 +162,7 @@ private Tuple2> buildGraphData(ElementBuilder element return Tuple2.of(op, elementBuilder.build(fields, values)); } - private void flush(ElementBuilder elementBuilder, List rows) { + private void flush(ElementBuilder elementBuilder, List rows) { GraphManager g = this.loadContext.client().graph(); ElementMapping elementMapping = elementBuilder.mapping(); for (String row : rows) { diff --git a/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/util/DataTypeUtil.java b/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/util/DataTypeUtil.java index 87ff516bb..6f4e4dbac 100644 --- a/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/util/DataTypeUtil.java +++ b/hugegraph-loader/src/main/java/com/baidu/hugegraph/loader/util/DataTypeUtil.java 
@@ -94,7 +94,7 @@ public static long parseNumber(String key, Object rawValue) { if (rawValue instanceof Number) { return ((Number) rawValue).longValue(); } else if (rawValue instanceof String) { - // trim() is a little time consuming + // trim() is a little time-consuming return parseLong(((String) rawValue).trim()); } throw new IllegalArgumentException(String.format( From 9bb9b53d88ee895e135997065cafcf634cc9a8b3 Mon Sep 17 00:00:00 2001 From: JackyYangPassion Date: Wed, 31 Aug 2022 19:50:09 +0800 Subject: [PATCH 2/9] write bypass server for HBase --- hugegraph-client/pom.xml | 12 + .../serializer/direct/BinaryEntry.java | 256 ---- .../serializer/direct/HBaseSerializer.java | 102 ++ .../serializer/direct/RocksDBSerializer.java | 375 ----- .../serializer/direct/backup/HugeEdge.java | 492 ------- .../serializer/direct/backup/HugeElement.java | 410 ------ .../serializer/direct/backup/HugeVertex.java | 386 ----- .../direct/backup/OldSerializer.java | 1294 ----------------- .../serializer/direct/reuse/BytesDemo.java | 212 +-- .../direct/struct/ElementBuilder.java | 625 -------- .../serializer/direct/util/BytesBuffer.java | 346 ++--- .../serializer/direct/util/DataTypeUtil.java | 604 ++++---- .../serializer/direct/util/EdgeId.java | 242 --- .../hugegraph/serializer/direct/util/Id.java | 34 +- .../serializer/direct/util/IdGenerator.java | 421 ++++++ .../serializer/direct/util/LZ4Util.java | 74 + .../direct/util/SplicingIdGenerator.java | 22 +- pom.xml | 1 + 18 files changed, 1155 insertions(+), 4753 deletions(-) delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java delete mode 100644 
hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/ElementBuilder.java delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java create mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java diff --git a/hugegraph-client/pom.xml b/hugegraph-client/pom.xml index ad96edea8..762f81ad7 100644 --- a/hugegraph-client/pom.xml +++ b/hugegraph-client/pom.xml @@ -39,6 +39,18 @@ ${hugegraph.common.version} + + org.lz4 + lz4-java + 1.4.0 + + + + org.apache.hbase + hbase-client + ${hbase.version} + + org.glassfish.jersey.containers jersey-container-servlet diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java deleted file mode 100644 index 5232f686e..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/BinaryEntry.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.baidu.hugegraph.serializer.direct; - -import static com.baidu.hugegraph.serializer.direct.util.StringEncoding.decode; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; - -import com.baidu.hugegraph.serializer.direct.struct.HugeType; -import com.baidu.hugegraph.serializer.direct.util.BytesBuffer; -import com.baidu.hugegraph.serializer.direct.util.Id; -import com.baidu.hugegraph.util.Bytes; -import com.baidu.hugegraph.util.E; - -public class BinaryEntry { - - private final HugeType type; - private final BinaryId id; - private Id subId; - private final List columns; - - public static final long COMMIT_BATCH = 500L; - - public BinaryEntry(HugeType type, byte[] bytes) { - this(type, BytesBuffer.wrap(bytes).parseId(type, false)); - } - - public BinaryEntry(HugeType type, byte[] bytes, boolean enablePartition) { - this(type, BytesBuffer.wrap(bytes).parseId(type, enablePartition)); - } - - public BinaryEntry(HugeType type, BinaryId id) { - this.type = type; - this.id = id; - this.subId = null; - this.columns = new ArrayList<>(); - } - - public HugeType type() { - return this.type; - } - - public BinaryId id() { - return this.id; - } - - public Id originId() { - return this.id.origin(); - } - - public Id subId() { - return this.subId; - } - - public void subId(Id subId) { - this.subId = subId; - } - - @Override - public String toString() { - return String.format("%s: %s", this.id, 
this.columns.toString()); - } - - public BackendColumn column(byte[] name) { - for (BackendColumn col : this.columns) { - if (Bytes.equals(col.name, name)) { - return col; - } - } - return null; - } - - public void column(BackendColumn column) { - this.columns.add(column); - } - - public void column(byte[] name, byte[] value) { - E.checkNotNull(name, "name"); - value = value != null ? value : BytesBuffer.BYTES_EMPTY; - this.columns.add(BackendColumn.of(name, value)); - } - - public Collection columns() { - return Collections.unmodifiableList(this.columns); - } - - public int columnsSize() { - return this.columns.size(); - } - - public void columns(Collection bytesColumns) { - this.columns.addAll(bytesColumns); - } - - public void columns(BackendColumn bytesColumn) { - this.columns.add(bytesColumn); - long maxSize = COMMIT_BATCH; - if (this.columns.size() > maxSize) { - E.checkState(false, "Too many columns in one entry: %s", maxSize); - } - } - - public BackendColumn removeColumn(int index) { - return this.columns.remove(index); - } - - public void clear() { - this.columns.clear(); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof BinaryEntry)) { - return false; - } - BinaryEntry other = (BinaryEntry) obj; - if (this.id() != other.id() && !this.id().equals(other.id())) { - return false; - } - if (this.columns.size() != other.columns.size()) { - return false; - } - return new HashSet<>(this.columns).containsAll(other.columns); - } - - @Override - public int hashCode() { - return this.id().hashCode() ^ this.columns.size(); - } - - protected static final class BinaryId implements Id { - - private final byte[] bytes; - private final Id id; - - public BinaryId(byte[] bytes, Id id) { - this.bytes = bytes; - this.id = id; - } - - public Id origin() { - return this.id; - } - - @Override - public byte[] asBytes() { - return this.bytes; - } - - @Override - public IdType type() { - return IdType.UNKNOWN; - } - - @Override - public boolean edge() { 
- return Id.super.edge(); - } - - public byte[] asBytes(int offset) { - E.checkArgument(offset < this.bytes.length, - "Invalid offset %s, must be < length %s", - offset, this.bytes.length); - return Arrays.copyOfRange(this.bytes, offset, this.bytes.length); - } - - @Override - public int length() { - return this.bytes.length; - } - - @Override - public int hashCode() { - return ByteBuffer.wrap(this.bytes).hashCode(); - } - - @Override - public boolean equals(Object other) { - if (!(other instanceof BinaryId)) { - return false; - } - return Arrays.equals(this.bytes, ((BinaryId) other).bytes); - } - - @Override - public String toString() { - return "0x" + Bytes.toHex(this.bytes); - } - } - - static class BackendColumn implements Comparable { - - public byte[] name; - public byte[] value; - - public static BackendColumn of(byte[] name, byte[] value) { - BackendColumn col = new BackendColumn(); - col.name = name; - col.value = value; - return col; - } - - @Override - public String toString() { - return String.format("%s=%s", decode(name), decode(value)); - } - - @Override - public int compareTo(BackendColumn other) { - if (other == null) { - return 1; - } - return Bytes.compare(this.name, other.name); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof BackendColumn)) { - return false; - } - BackendColumn other = (BackendColumn) obj; - return Bytes.equals(this.name, other.name) && - Bytes.equals(this.value, other.value); - } - - @Override - public int hashCode() { - return Arrays.hashCode(this.name) ^ - Arrays.hashCode(this.value); - } - - } -} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java new file mode 100644 index 000000000..82e6c3372 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java @@ -0,0 +1,102 @@ +package 
com.baidu.hugegraph.serializer.direct; + +import com.baidu.hugegraph.driver.HugeClient; +import com.baidu.hugegraph.serializer.direct.struct.HugeType; +import com.baidu.hugegraph.serializer.direct.util.BytesBuffer; +import com.baidu.hugegraph.serializer.direct.util.GraphSchema; +import com.baidu.hugegraph.serializer.direct.util.Id; +import com.baidu.hugegraph.serializer.direct.util.IdGenerator; +import com.baidu.hugegraph.structure.GraphElement; +import com.baidu.hugegraph.structure.graph.Edge; +import com.baidu.hugegraph.structure.schema.PropertyKey; + +import java.util.Arrays; +import java.util.Map; + +public class HBaseSerializer { + + private int edgeLogicPartitions = 30; + private int vertexLogicPartitions = 10; + private HugeClient client; + private GraphSchema graphSchema; + + + public HBaseSerializer(HugeClient client, int vertexPartitions, int edgePartitions){ + this.client = client; + this.graphSchema = new GraphSchema(client); + this.edgeLogicPartitions = edgePartitions; + this.vertexLogicPartitions = vertexPartitions; + } + + public byte[] getKeyBytes(GraphElement e) { + byte[] array = null; + if(e.type() == "vertex" && e.id() != null){ + BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + e.id().toString().length()); + buffer.writeShort(getPartition(HugeType.VERTEX, IdGenerator.of(e.id()))); + buffer.writeId(IdGenerator.of(e.id())); + array = buffer.bytes(); + }else if ( e.type() == "edge" ){ + BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); + Edge edge = (Edge)e; + buffer.writeShort(getPartition(HugeType.EDGE, IdGenerator.of(edge.sourceId()))); + buffer.writeId(IdGenerator.of(edge.sourceId())); + buffer.write(HugeType.EDGE_OUT.code()); + buffer.writeId(IdGenerator.of(graphSchema.getEdgeLabel(e.label()).id()));//出现错误 + buffer.writeStringWithEnding(""); + buffer.writeId(IdGenerator.of(edge.targetId())); + array = buffer.bytes(); + } + return array; + } + + public byte[] getValueBytes(GraphElement e) { + byte[] array = null; + 
if(e.type() == "vertex"){ + int propsCount = e.properties().size() ;//vertex.sizeOfProperties(); + BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount); + buffer.writeId(IdGenerator.of(graphSchema.getVertexLabel(e.label()).id())); + buffer.writeVInt(propsCount); + for(Map.Entry entry : e.properties().entrySet()){ + PropertyKey propertyKey = graphSchema.getPropertyKey(entry.getKey()); + buffer.writeVInt(propertyKey.id().intValue()); + buffer.writeProperty(propertyKey.dataType(),entry.getValue()); + } + array = buffer.bytes(); + } else if ( e.type() == "edge" ){ + int propsCount = e.properties().size(); + BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount); + buffer.writeVInt(propsCount); + for(Map.Entry entry : e.properties().entrySet()){ + PropertyKey propertyKey = graphSchema.getPropertyKey(entry.getKey()); + buffer.writeVInt(propertyKey.id().intValue()); + buffer.writeProperty(propertyKey.dataType(),entry.getValue()); + } + array = buffer.bytes(); + } + + return array; + } + + public short getPartition(HugeType type, Id id) { + int hashcode = Arrays.hashCode(id.asBytes()); + short partition = 1; + if (type.isEdge()) { + partition = (short) (hashcode % edgeLogicPartitions); + } else if (type.isVertex()) { + partition = (short) (hashcode % vertexLogicPartitions); + } + return partition > 0 ? 
partition : (short) -partition; + } + + public int getEdgeLogicPartitions(){ + return this.edgeLogicPartitions; + } + + public int getVertexLogicPartitions(){ + return this.vertexLogicPartitions; + } + + public void close(){ + this.client.close(); + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java index 0ead419c6..46f07a722 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java @@ -19,42 +19,6 @@ package com.baidu.hugegraph.serializer.direct; -import static com.baidu.hugegraph.serializer.direct.BinaryEntry.BackendColumn; -import static com.baidu.hugegraph.serializer.direct.BinaryEntry.BinaryId; -import static com.baidu.hugegraph.serializer.direct.struct.HugeType.EDGE; -import static com.baidu.hugegraph.serializer.direct.struct.HugeType.VERTEX; -import static com.baidu.hugegraph.structure.graph.Graph.HugeEdge; -import static com.baidu.hugegraph.structure.graph.Graph.HugeVertex; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Iterator; -import java.util.Map; - -import com.baidu.hugegraph.HugeGraph; -import com.baidu.hugegraph.backend.id.IdGenerator; -import com.baidu.hugegraph.driver.HugeClient; -import com.baidu.hugegraph.serializer.direct.struct.HugeElement; -import com.baidu.hugegraph.serializer.direct.struct.HugeType; -import com.baidu.hugegraph.serializer.direct.util.BytesBuffer; -import com.baidu.hugegraph.serializer.direct.util.EdgeId; -import com.baidu.hugegraph.serializer.direct.util.GraphSchema; -import com.baidu.hugegraph.serializer.direct.util.HugeException; -import com.baidu.hugegraph.serializer.direct.util.Id; -import com.baidu.hugegraph.serializer.direct.util.StringEncoding; -import com.baidu.hugegraph.structure.GraphElement; 
-import com.baidu.hugegraph.structure.HugeProperty; -import com.baidu.hugegraph.structure.SchemaElement; -import com.baidu.hugegraph.structure.constant.Cardinality; -import com.baidu.hugegraph.structure.graph.Edge; -import com.baidu.hugegraph.structure.graph.Vertex; -import com.baidu.hugegraph.structure.schema.EdgeLabel; -import com.baidu.hugegraph.structure.schema.PropertyKey; -import com.baidu.hugegraph.structure.schema.VertexLabel; -import com.baidu.hugegraph.type.define.HugeKeys; -import com.baidu.hugegraph.util.Bytes; -import com.baidu.hugegraph.util.E; - /** * In this serializer, we only support normal type now: * - number @@ -63,343 +27,4 @@ **/ public class RocksDBSerializer { - /* - * ID is stored in column name if keyWithIdPrefix=true like RocksDB, stored in rowkey for HBase - */ - private final boolean keyWithIdPrefix; - private final boolean indexWithIdPrefix; - private final boolean enablePartition; - GraphSchema schema; - - public RocksDBSerializer(HugeClient client) { - this(true, true, false, client); - } - - public RocksDBSerializer(boolean keyWithIdPrefix, boolean indexWithIdPrefix, - boolean enablePartition, HugeClient client) { - // only consider rocksdb now - this.keyWithIdPrefix = true; - this.indexWithIdPrefix = true; - this.enablePartition = false; - this.schema = new GraphSchema(client); - } - - protected BinaryEntry newBackendEntry(HugeType type, Id id) { - if (type == VERTEX) { - BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + id.length()); - writePartitionedId(VERTEX, id, buffer); - return new BinaryEntry(type, buffer.bytes()); - } - - if (type == EDGE) { - return new BinaryEntry(type, (BinaryId) id); - } - - BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); - byte[] idBytes = buffer.writeId(id).bytes(); - return new BinaryEntry(type, new BinaryId(idBytes, id)); - } - - protected final BinaryEntry newBackendEntry(Vertex vertex) { - return newBackendEntry(VERTEX, vertex.id()); - } - - protected final BinaryEntry 
newBackendEntry(Edge edge) { - BinaryId id = writeEdgeId(edge.idWithDirection()); - return newBackendEntry(EDGE, id); - } - - protected final BinaryEntry newBackendEntry(SchemaElement elem) { - return newBackendEntry(elem.type(), elem.id()); - } - - protected BackendColumn formatLabel(GraphElement elem) { - BackendColumn col = new BackendColumn(); - col.name = this.formatSyspropName(elem.id(), HugeKeys.LABEL); - Id label = elem.schemaLabel().id(); - BytesBuffer buffer = BytesBuffer.allocate(label.length() + 1); - col.value = buffer.writeId(label).bytes(); - return col; - } - - protected byte[] formatPropertyName(HugeProperty prop) { - Id id = prop.element().id(); - int idLen = this.keyWithIdPrefix ? 1 + id.length() : 0; - Id pkeyId = prop.propertyKey().id(); - BytesBuffer buffer = BytesBuffer.allocate(idLen + 2 + pkeyId.length()); - if (this.keyWithIdPrefix) { - buffer.writeId(id); - } - buffer.write(prop.type().code()); - buffer.writeId(pkeyId); - return buffer.bytes(); - } - - protected BackendColumn formatProperty(HugeProperty prop) { - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_PROPERTY); - buffer.writeProperty(prop.propertyKey(), prop.value()); - return BackendColumn.of(this.formatPropertyName(prop), buffer.bytes()); - } - - protected void parseProperty(Id pkeyId, BytesBuffer buffer, GraphElement owner) { - PropertyKey pkey = owner.graph().propertyKey(pkeyId); - - // Parse value - Object value = buffer.readProperty(pkey); - - // Set properties of vertex/edge - if (pkey.cardinality() == Cardinality.SINGLE) { - owner.addProperty(pkey, value); - } else { - if (!(value instanceof Collection)) { - throw new HugeException("Invalid value of non-single property: %s", value); - } - owner.addProperty(pkey, value); - } - } - - protected void formatProperties(Map props, - BytesBuffer buffer) { - // Write properties size - buffer.writeVInt(props.size()); - - // Write properties data - for (Map.Entry kv : props.entrySet()) { - String key = kv.getKey(); - 
Object value = kv.getValue(); - PropertyKey pkey = - } - - for (HugeProperty property : props) { - PropertyKey pkey = property.propertyKey(); - buffer.writeVInt(SchemaElement.schemaId(pkey.id())); - buffer.writeProperty(pkey, property.value()); - } - } - - protected void parseProperties(BytesBuffer buffer, HugeElement owner) { - int size = buffer.readVInt(); - assert size >= 0; - for (int i = 0; i < size; i++) { - Id pkeyId = IdGenerator.of(buffer.readVInt()); - this.parseProperty(pkeyId, buffer, owner); - } - } - - protected byte[] formatEdgeValue(Edge edge) { - int propsCount = edge.sizeOfProperties(); - BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount); - - // Write edge id - //buffer.writeId(edge.id()); - - // Write edge properties - this.formatProperties(edge.properties(), buffer); - return buffer.bytes(); - } - - protected void parseEdge(BackendColumn col, HugeVertex vertex) { - // owner-vertex + dir + edge-label + sort-values + other-vertex - - BytesBuffer buffer = BytesBuffer.wrap(col.name); - if (this.keyWithIdPrefix) { - // Consume owner-vertex id - buffer.readId(); - } - byte type = buffer.read(); - Id labelId = buffer.readId(); - String sortValues = buffer.readStringWithEnding(); - Id otherVertexId = buffer.readId(); - - boolean direction = EdgeId.isOutDirectionFromCode(type); - EdgeLabel edgeLabel = schema.getEdgeLabel(labelId); - - // Construct edge - HugeEdge edge = HugeEdge.constructEdge(vertex, direction, edgeLabel, - sortValues, otherVertexId); - - // Parse edge-id + edge-properties - buffer = BytesBuffer.wrap(col.value); - - //Id id = buffer.readId(); - - // Parse edge properties - this.parseProperties(buffer, edge); - } - - protected void parseVertex(byte[] value, HugeVertex vertex) { - BytesBuffer buffer = BytesBuffer.wrap(value); - - // Parse vertex label - VertexLabel label = schema.getVertexLabel(buffer.readId()); - vertex.correctVertexLabel(label); - - // Parse properties - this.parseProperties(buffer, vertex); - } - - 
protected void parseColumn(BackendColumn col, Vertex vertex) { - BytesBuffer buffer = BytesBuffer.wrap(col.name); - Id id = this.keyWithIdPrefix ? buffer.readId() : vertex.id(); - E.checkState(buffer.remaining() > 0, "Missing column type"); - byte type = buffer.read(); - // Parse property - if (type == HugeType.PROPERTY.code()) { - Id pkeyId = buffer.readId(); - this.parseProperty(pkeyId, BytesBuffer.wrap(col.value), vertex); - } - // Parse edge - else if (type == HugeType.EDGE_IN.code() || - type == HugeType.EDGE_OUT.code()) { - this.parseEdge(col, vertex); - } else { - E.checkState(false, "Invalid entry(%s) with unknown type(%s): 0x%s", - id, type & 0xff, Bytes.toHex(col.name)); - } - } - - public BinaryEntry writeVertex(Vertex vertex) { - BinaryEntry entry = newBackendEntry(vertex); - - int propsCount = vertex.sizeOfProperties(); - BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount); - - // Write vertex label - buffer.writeId(vertex.schemaLabel().id()); - - // Write all properties of the vertex - this.formatProperties(vertex.properties(), buffer); - - // Fill column - byte[] name = this.keyWithIdPrefix ? entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; - entry.column(name, buffer.bytes()); - - return entry; - } - - public HugeVertex readVertex(BinaryEntry bytesEntry) { - if (bytesEntry == null) { - return null; - } - BinaryEntry entry = this.convertEntry(bytesEntry); - - // Parse id - Id id = entry.id().origin(); - Id vid = id.edge() ? 
((EdgeId) id).ownerVertexId() : id; - HugeVertex vertex = new HugeVertex(vid, VertexLabel.NONE); - - // Parse all properties and edges of a Vertex - Iterator iterator = entry.columns().iterator(); - for (int index = 0; iterator.hasNext(); index++) { - BackendColumn col = iterator.next(); - if (entry.type().isEdge()) { - // NOTE: the entry id type is vertex even if entry type is edge - // Parse vertex edges - this.parseColumn(col, vertex); - } else { - assert entry.type().isVertex(); - // Parse vertex properties - assert entry.columnsSize() >= 1 : entry.columnsSize(); - if (index == 0) { - this.parseVertex(col.value, vertex); - } else { - this.parseVertexOlap(col.value, vertex); - } - } - } - - return vertex; - } - - protected void parseVertexOlap(byte[] value, HugeVertex vertex) { - BytesBuffer buffer = BytesBuffer.wrap(value); - Id pkeyId = IdGenerator.of(buffer.readVInt()); - this.parseProperty(pkeyId, buffer, vertex); - } - - public BinaryEntry writeEdge(HugeEdge edge) { - BinaryEntry entry = newBackendEntry(edge); - byte[] name = this.keyWithIdPrefix ? 
- entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; - byte[] value = this.formatEdgeValue(edge); - entry.column(name, value); - - return entry; - } - - public HugeEdge readEdge(BinaryEntry bytesEntry) { - HugeVertex vertex = this.readVertex(bytesEntry); - Collection edges = vertex.getEdges(); - if (edges.size() != 1) { - E.checkState(false, "Expect 1 edge in vertex, but got %s", edges.size()); - } - return edges.iterator().next(); - } - - public BinaryEntry writeId(HugeType type, Id id) { - return newBackendEntry(type, id); - } - - private BinaryId writeEdgeId(Id id) { - EdgeId edgeId; - if (id instanceof EdgeId) { - edgeId = (EdgeId) id; - } else { - edgeId = EdgeId.parse(id.asString()); - } - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - if (this.enablePartition) { - buffer.writeShort(getPartition(HugeType.EDGE, edgeId.ownerVertexId())); - buffer.writeEdgeId(edgeId); - } else { - buffer.writeEdgeId(edgeId); - } - return new BinaryId(buffer.bytes(), id); - } - - private void writePartitionedId(HugeType type, Id id, BytesBuffer buffer) { - if (this.enablePartition) { - buffer.writeShort(getPartition(type, id)); - buffer.writeId(id); - } else { - buffer.writeId(id); - } - } - - protected short getPartition(HugeType type, Id id) { - return 0; - } - - public BinaryEntry parse(BinaryEntry originEntry) { - byte[] bytes = originEntry.id().asBytes(); - BinaryEntry parsedEntry = new BinaryEntry(originEntry.type(), bytes, this.enablePartition); - - if (this.enablePartition) { - bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length() + 2, bytes.length); - } else { - bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length(), bytes.length); - } - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - buffer.write(parsedEntry.id().asBytes()); - buffer.write(bytes); - parsedEntry = new BinaryEntry(originEntry.type(), - new BinaryId(buffer.bytes(), BytesBuffer.wrap(buffer.bytes()).readEdgeId())); - - for (BackendColumn col : 
originEntry.columns()) { - parsedEntry.column(buffer.bytes(), col.value); - } - return parsedEntry; - } - - - protected static boolean indexIdLengthExceedLimit(Id id) { - return id.asBytes().length > BytesBuffer.INDEX_HASH_ID_THRESHOLD; - } - - protected static boolean indexFieldValuesUnmatched(byte[] value, Object fieldValues) { - if (value != null && value.length > 0 && fieldValues != null) { - return !StringEncoding.decode(value).equals(fieldValues); - } - return false; - } } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java deleted file mode 100644 index 798a85eae..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeEdge.java +++ /dev/null @@ -1,492 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.baidu.hugegraph.serializer.direct.struct; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import org.apache.logging.log4j.util.Strings; -import org.apache.tinkerpop.gremlin.structure.Direction; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Property; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.StringFactory; - -import com.baidu.hugegraph.HugeGraph; -import com.baidu.hugegraph.backend.id.EdgeId; -import com.baidu.hugegraph.backend.query.ConditionQuery; -import com.baidu.hugegraph.backend.query.QueryResults; -import com.baidu.hugegraph.perf.PerfUtil.Watched; -import com.baidu.hugegraph.schema.EdgeLabel; -import com.baidu.hugegraph.schema.PropertyKey; -import com.baidu.hugegraph.schema.VertexLabel; -import com.baidu.hugegraph.serializer.direct.BytesBuffer; -import com.baidu.hugegraph.serializer.direct.util.Id; -import com.baidu.hugegraph.serializer.direct.util.HugeException; -import com.baidu.hugegraph.type.HugeType; -import com.baidu.hugegraph.type.define.Directions; -import com.baidu.hugegraph.type.define.HugeKeys; -import com.baidu.hugegraph.util.E; -import com.google.common.collect.ImmutableList; - -public class HugeEdge extends HugeElement implements Edge, Cloneable { - - private Id id; - private String name; - - private HugeVertex sourceVertex; - private HugeVertex targetVertex; - private boolean isOutEdge; - - public HugeEdge(HugeVertex owner, Id id, EdgeLabel label, - HugeVertex other) { - this(owner.graph(), id, label); - this.fresh(true); - this.vertices(owner, other); - } - - public HugeEdge(final HugeGraph graph, Id id, String label) { - super(graph); - - E.checkArgumentNotNull(label, "Edge label can't be null"); - this.label = label; - - this.id = id; - this.name = null; - this.sourceVertex = null; - this.targetVertex = null; - this.isOutEdge = true; - } - - @Override - public 
HugeType type() { - // NOTE: we optimize the edge type that let it include direction - return this.isOutEdge ? HugeType.EDGE_OUT : HugeType.EDGE_IN; - } - - @Override - public EdgeId id() { - return (EdgeId) this.id; - } - - @Override - public EdgeLabel schemaLabel() { - assert this.graph().sameAs(this.label.graph()); - return this.label; - } - - @Override - public String name() { - if (this.name == null) { - List sortValues = this.sortValues(); - if (sortValues.isEmpty()) { - this.name = Strings.EMPTY; - } else { - this.name = ConditionQuery.concatValues(sortValues); - } - } - return this.name; - } - - public void name(String name) { - this.name = name; - } - - public boolean selfLoop() { - return this.sourceVertex != null && - this.sourceVertex == this.targetVertex; - } - - public Directions direction() { - return this.isOutEdge ? Directions.OUT : Directions.IN; - } - - public boolean matchDirection(Directions direction) { - if (direction == Directions.BOTH || this.selfLoop()) { - return true; - } - return this.isDirection(direction); - } - - public boolean isDirection(Directions direction) { - return this.isOutEdge && direction == Directions.OUT || - !this.isOutEdge && direction == Directions.IN; - } - - @Watched(prefix = "edge") - public void assignId() { - // Generate an id and assign - this.id = new EdgeId(this.ownerVertex(), this.direction(), - this.schemaLabel().id(), this.name(), - this.otherVertex()); - - if (this.fresh()) { - int len = this.id.length(); - E.checkArgument(len <= BytesBuffer.BIG_ID_LEN_MAX, - "The max length of edge id is %s, but got %s {%s}", - BytesBuffer.BIG_ID_LEN_MAX, len, this.id); - } - } - - @Watched(prefix = "edge") - public EdgeId idWithDirection() { - return ((EdgeId) this.id).directed(true); - } - - @Watched(prefix = "edge") - protected List sortValues() { - List sortKeys = this.schemaLabel().sortKeys(); - if (sortKeys.isEmpty()) { - return ImmutableList.of(); - } - List propValues = new ArrayList<>(sortKeys.size()); - for (Id 
sk : sortKeys) { - HugeProperty property = this.getProperty(sk); - E.checkState(property != null, - "The value of sort key '%s' can't be null", sk); - Object propValue = property.serialValue(true); - if (Strings.EMPTY.equals(propValue)) { - propValue = ConditionQuery.INDEX_VALUE_EMPTY; - } - propValues.add(propValue); - } - return propValues; - } - - @Override - public Property property(String key, V value) { - PropertyKey propertyKey = this.graph().propertyKey(key); - // Check key in edge label - E.checkArgument(this.label.properties().contains(propertyKey.id()), - "Invalid property '%s' for edge label '%s'", - key, this.label()); - // Sort-Keys can only be set once - if (this.schemaLabel().sortKeys().contains(propertyKey.id())) { - E.checkArgument(!this.hasProperty(propertyKey.id()), - "Can't update sort key: '%s'", key); - } - return this.addProperty(propertyKey, value, !this.fresh()); - } - - @Watched(prefix = "edge") - @Override - protected HugeEdgeProperty newProperty(PropertyKey pkey, V val) { - return new HugeEdgeProperty<>(this, pkey, val); - } - - @Watched(prefix = "edge") - @Override - protected boolean ensureFilledProperties(boolean throwIfNotExist) { - if (this.isPropLoaded()) { - this.updateToDefaultValueIfNone(); - return true; - } - - // Skip query if there is no any property key in schema - if (this.schemaLabel().properties().isEmpty()) { - this.propLoaded(); - return true; - } - - // Seems there is no scene to be here - Iterator edges = this.graph().edges(this.id()); - Edge edge = QueryResults.one(edges); - if (edge == null && !throwIfNotExist) { - return false; - } - E.checkState(edge != null, "Edge '%s' does not exist", this.id); - this.copyProperties((HugeEdge) edge); - this.updateToDefaultValueIfNone(); - return true; - } - - @Watched(prefix = "edge") - @SuppressWarnings("unchecked") // (Property) prop - @Override - public Iterator> properties(String... 
keys) { - this.ensureFilledProperties(true); - - // Capacity should be about the following size - int propsCapacity = keys.length == 0 ? - this.sizeOfProperties() : - keys.length; - List> props = new ArrayList<>(propsCapacity); - - if (keys.length == 0) { - for (HugeProperty prop : this.getProperties()) { - assert prop instanceof Property; - props.add((Property) prop); - } - } else { - for (String key : keys) { - Id pkeyId; - try { - pkeyId = this.graph().propertyKey(key).id(); - } catch (IllegalArgumentException ignored) { - continue; - } - HugeProperty prop = this.getProperty(pkeyId); - if (prop == null) { - // Not found - continue; - } - assert prop instanceof Property; - props.add((Property) prop); - } - } - return props.iterator(); - } - - @Override - public Object sysprop(HugeKeys key) { - switch (key) { - case ID: - return this.id(); - case OWNER_VERTEX: - return this.ownerVertex().id(); - case LABEL: - return this.schemaLabel().id(); - case DIRECTION: - return this.direction(); - case OTHER_VERTEX: - return this.otherVertex().id(); - case SORT_VALUES: - return this.name(); - case PROPERTIES: - return this.getPropertiesMap(); - default: - E.checkArgument(false, - "Invalid system property '%s' of Edge", key); - return null; - } - } - - @Override - public Iterator vertices(Direction direction) { - List vertices = new ArrayList<>(2); - switch (direction) { - case OUT: - vertices.add(this.sourceVertex()); - break; - case IN: - vertices.add(this.targetVertex()); - break; - case BOTH: - vertices.add(this.sourceVertex()); - vertices.add(this.targetVertex()); - break; - default: - throw new AssertionError("Unsupported direction: " + direction); - } - - return vertices.iterator(); - } - - @Override - public Vertex outVertex() { - return this.sourceVertex(); - } - - @Override - public Vertex inVertex() { - return this.targetVertex(); - } - - public void vertices(HugeVertex owner, HugeVertex other) { - Id ownerLabel = owner.schemaLabel().id(); - if 
(ownerLabel.equals(this.label.sourceLabel())) { - this.vertices(true, owner, other); - } else { - ownerLabel.equals(this.label.targetLabel()); - this.vertices(false, owner, other); - } - } - - public void vertices(boolean outEdge, HugeVertex owner, HugeVertex other) { - this.isOutEdge = outEdge; - if (this.isOutEdge) { - this.sourceVertex = owner; - this.targetVertex = other; - } else { - this.sourceVertex = other; - this.targetVertex = owner; - } - } - - @Watched - public HugeEdge switchOwner() { - HugeEdge edge = this.clone(); - edge.isOutEdge = !edge.isOutEdge; - edge.id = ((EdgeId) edge.id).switchDirection(); - return edge; - } - - public HugeEdge switchToOutDirection() { - if (this.direction() == Directions.IN) { - return this.switchOwner(); - } - return this; - } - - public HugeVertex ownerVertex() { - return this.isOutEdge ? this.sourceVertex() : this.targetVertex(); - } - - public HugeVertex sourceVertex() { - this.checkAdjacentVertexExist(this.sourceVertex); - return this.sourceVertex; - } - - public void sourceVertex(HugeVertex sourceVertex) { - this.sourceVertex = sourceVertex; - } - - public HugeVertex targetVertex() { - this.checkAdjacentVertexExist(this.targetVertex); - return this.targetVertex; - } - - public void targetVertex(HugeVertex targetVertex) { - this.targetVertex = targetVertex; - } - - private void checkAdjacentVertexExist(HugeVertex vertex) { - if (vertex.schemaLabel().undefined() && - this.graph().checkAdjacentVertexExist()) { - throw new HugeException("Vertex '%s' does not exist", vertex.id()); - } - } - - public boolean belongToLabels(String... 
edgeLabels) { - if (edgeLabels.length == 0) { - return true; - } - - // Does edgeLabels contain me - for (String label : edgeLabels) { - if (label.equals(this.label())) { - return true; - } - } - return false; - } - - public boolean belongToVertex(HugeVertex vertex) { - return vertex != null && (vertex.equals(this.sourceVertex) || - vertex.equals(this.targetVertex)); - } - - public HugeVertex otherVertex(HugeVertex vertex) { - if (vertex == this.sourceVertex()) { - return this.targetVertex(); - } else { - E.checkArgument(vertex == this.targetVertex(), - "Invalid argument vertex '%s', must be in [%s, %s]", - vertex, this.sourceVertex(), this.targetVertex()); - return this.sourceVertex(); - } - } - - public HugeVertex otherVertex() { - return this.isOutEdge ? this.targetVertex() : this.sourceVertex(); - } - - /** - * Clear properties of the edge, and set `removed` true - * @return a new edge - */ - public HugeEdge prepareRemoved() { - HugeEdge edge = this.clone(); - edge.removed(true); - edge.resetProperties(); - return edge; - } - - @Override - public HugeEdge copy() { - HugeEdge edge = this.clone(); - edge.copyProperties(this); - return edge; - } - - @Override - protected HugeEdge clone() { - try { - return (HugeEdge) super.clone(); - } catch (CloneNotSupportedException e) { - throw new HugeException("Failed to clone HugeEdge", e); - } - } - - @Override - public String toString() { - return StringFactory.edgeString(this); - } - - public static final EdgeId getIdValue(Object idValue, - boolean returnNullIfError) { - Id id = HugeElement.getIdValue(idValue); - if (id == null || id instanceof EdgeId) { - return (EdgeId) id; - } - return EdgeId.parse(id.asString(), returnNullIfError); - } - - @Watched - public static HugeEdge constructEdge(HugeVertex ownerVertex, - boolean isOutEdge, - EdgeLabel edgeLabel, - String sortValues, - Id otherVertexId) { - HugeGraph graph = ownerVertex.graph(); - VertexLabel srcLabel = graph.vertexLabelOrNone(edgeLabel.sourceLabel()); - 
VertexLabel tgtLabel = graph.vertexLabelOrNone(edgeLabel.targetLabel()); - - VertexLabel otherVertexLabel; - if (isOutEdge) { - ownerVertex.correctVertexLabel(srcLabel); - otherVertexLabel = tgtLabel; - } else { - ownerVertex.correctVertexLabel(tgtLabel); - otherVertexLabel = srcLabel; - } - HugeVertex otherVertex = new HugeVertex(graph, otherVertexId, - otherVertexLabel); - - ownerVertex.propNotLoaded(); - otherVertex.propNotLoaded(); - - HugeEdge edge = new HugeEdge(graph, null, edgeLabel); - edge.name(sortValues); - edge.vertices(isOutEdge, ownerVertex, otherVertex); - edge.assignId(); - - if (isOutEdge) { - ownerVertex.addOutEdge(edge); - otherVertex.addInEdge(edge.switchOwner()); - } else { - ownerVertex.addInEdge(edge); - otherVertex.addOutEdge(edge.switchOwner()); - } - - return edge; - } -} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java deleted file mode 100644 index 723cbc2d2..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeElement.java +++ /dev/null @@ -1,410 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.baidu.hugegraph.serializer.direct.struct; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.function.Supplier; - -import org.apache.tinkerpop.gremlin.structure.Property; -import org.apache.tinkerpop.gremlin.structure.T; -import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; -import org.eclipse.collections.api.iterator.IntIterator; -import org.eclipse.collections.api.map.primitive.MutableIntObjectMap; - -import com.baidu.hugegraph.backend.id.EdgeId; -import com.baidu.hugegraph.backend.id.IdGenerator; -import com.baidu.hugegraph.perf.PerfUtil.Watched; -import com.baidu.hugegraph.schema.PropertyKey; -import com.baidu.hugegraph.schema.SchemaLabel; -import com.baidu.hugegraph.serializer.direct.BytesBuffer; -import com.baidu.hugegraph.serializer.direct.util.Id; -import com.baidu.hugegraph.structure.Element; -import com.baidu.hugegraph.type.HugeType; -import com.baidu.hugegraph.type.Idfiable; -import com.baidu.hugegraph.type.define.Cardinality; -import com.baidu.hugegraph.util.CollectionUtil; -import com.baidu.hugegraph.util.E; -import com.baidu.hugegraph.util.InsertionOrderUtil; -import com.baidu.hugegraph.util.collection.CollectionFactory; - -public abstract class HugeElement implements Element, GraphType, Idfiable { - - private static final MutableIntObjectMap> EMPTY_MAP = - CollectionFactory.newIntObjectMap(); - private static final int MAX_PROPERTIES = BytesBuffer.UINT16_MAX; - - private MutableIntObjectMap> properties; - - String label; - - public HugeElement() { - this.properties = EMPTY_MAP; - } - - public String label() { - return this.label; - } - - public abstract SchemaLabel schemaLabel(); - - protected abstract HugeProperty newProperty(PropertyKey pk, V val); - - protected 
abstract void onUpdateProperty(Cardinality cardinality, - HugeProperty prop); - - protected abstract boolean ensureFilledProperties(boolean throwIfNotExist); - - public Set getPropertyKeys() { - Set propKeys = InsertionOrderUtil.newSet(); - IntIterator keys = this.properties.keysView().intIterator(); - while (keys.hasNext()) { - propKeys.add(IdGenerator.of(keys.next())); - } - return propKeys; - } - - public Collection> getProperties() { - return this.properties.values(); - } - - public Collection> getFilledProperties() { - this.ensureFilledProperties(true); - return this.getProperties(); - } - - public Map getPropertiesMap() { - Map props = InsertionOrderUtil.newMap(); - for (HugeProperty prop : this.properties.values()) { - props.put(prop.propertyKey().id(), prop.value()); - } - // TODO: return MutableIntObjectMap for this method? - return props; - } - - public Collection> getAggregateProperties() { - List> aggrProps = InsertionOrderUtil.newList(); - for (HugeProperty prop : this.properties.values()) { - if (prop.type().isAggregateProperty()) { - aggrProps.add(prop); - } - } - return aggrProps; - } - - @SuppressWarnings("unchecked") - public HugeProperty getProperty(Id key) { - return (HugeProperty) this.properties.get(intFromId(key)); - } - - @SuppressWarnings("unchecked") - public V getPropertyValue(Id key) { - HugeProperty prop = this.properties.get(intFromId(key)); - if (prop == null) { - return null; - } - return (V) prop.value(); - } - - public boolean hasProperty(Id key) { - return this.properties.containsKey(intFromId(key)); - } - - public boolean hasProperties() { - return this.properties.size() > 0; - } - - public int sizeOfProperties() { - return this.properties.size(); - } - - public int sizeOfSubProperties() { - int size = 0; - for (HugeProperty p : this.properties.values()) { - size++; - if (p.propertyKey().cardinality() != Cardinality.SINGLE && - p.value() instanceof Collection) { - size += ((Collection) p.value()).size(); - } - } - return size; - 
} - - @Watched(prefix = "element") - public HugeProperty setProperty(HugeProperty prop) { - if (this.properties == EMPTY_MAP) { - this.properties = CollectionFactory.newIntObjectMap(); - } - PropertyKey pkey = prop.propertyKey(); - - E.checkArgument(this.properties.containsKey(intFromId(pkey.id())) || - this.properties.size() < MAX_PROPERTIES, - "Exceeded the maximum number of properties"); - return this.properties.put(intFromId(pkey.id()), prop); - } - - public HugeProperty removeProperty(Id key) { - return this.properties.remove(intFromId(key)); - } - - public HugeProperty addProperty(PropertyKey pkey, V value) { - return this.addProperty(pkey, value, false); - } - - @Watched(prefix = "element") - public HugeProperty addProperty(PropertyKey pkey, V value, - boolean notify) { - HugeProperty prop = null; - switch (pkey.cardinality()) { - case SINGLE: - prop = this.newProperty(pkey, value); - if (notify) { - /* - * NOTE: this method should be called before setProperty() - * because tx need to delete index without the new property - */ - this.onUpdateProperty(pkey.cardinality(), prop); - } - this.setProperty(prop); - break; - case SET: - prop = this.addProperty(pkey, value, HashSet::new); - if (notify) { - this.onUpdateProperty(pkey.cardinality(), prop); - } - break; - case LIST: - prop = this.addProperty(pkey, value, ArrayList::new); - if (notify) { - this.onUpdateProperty(pkey.cardinality(), prop); - } - break; - default: - assert false; - break; - } - return prop; - } - - @Watched(prefix = "element") - @SuppressWarnings({ "rawtypes", "unchecked" }) - private HugeProperty addProperty(PropertyKey pkey, V value, - Supplier> supplier) { - assert pkey.cardinality().multiple(); - HugeProperty> property; - if (this.hasProperty(pkey.id())) { - property = this.getProperty(pkey.id()); - } else { - property = this.newProperty(pkey, supplier.get()); - this.setProperty(property); - } - - Collection values; - if (pkey.cardinality() == Cardinality.SET) { - if (value instanceof 
Set) { - values = (Set) value; - } else { - values = CollectionUtil.toSet(value); - } - } else { - assert pkey.cardinality() == Cardinality.LIST; - if (value instanceof List) { - values = (List) value; - } else { - values = CollectionUtil.toList(value); - } - } - property.value().addAll(pkey.validValueOrThrow(values)); - - // Any better ways? - return property; - } - - public void resetProperties() { - this.properties = CollectionFactory.newIntObjectMap(); - this.propLoaded = false; - } - - protected void copyProperties(HugeElement element) { - if (element.properties == EMPTY_MAP) { - this.properties = EMPTY_MAP; - } else { - this.properties = CollectionFactory.newIntObjectMap( - element.properties); - } - this.propLoaded = true; - } - - public HugeElement copyAsFresh() { - HugeElement elem = this.copy(); - elem.fresh = true; - return elem; - } - - public abstract HugeElement copy(); - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Element)) { - return false; - } - - Element other = (Element) obj; - if (this.id() == null) { - return false; - } - - return this.id().equals(other.id()); - } - - /** - * Classify parameter list(pairs) from call request - * @param keyValues The property key-value pair of the vertex or edge - * @return Key-value pairs that are classified and processed - */ - @Watched(prefix = "element") - public static final ElementKeys classifyKeys(Object... 
keyValues) { - ElementKeys elemKeys = new ElementKeys(); - - if ((keyValues.length & 1) == 1) { - throw Element.Exceptions.providedKeyValuesMustBeAMultipleOfTwo(); - } - for (int i = 0; i < keyValues.length; i = i + 2) { - Object key = keyValues[i]; - Object val = keyValues[i + 1]; - - if (!(key instanceof String) && !(key instanceof T)) { - throw Element.Exceptions - .providedKeyValuesMustHaveALegalKeyOnEvenIndices(); - } - if (val == null) { - throw Property.Exceptions.propertyDoesNotExist(); - } - - if (key.equals(T.id)) { - elemKeys.id = val; - } else if (key.equals(T.label)) { - elemKeys.label = val; - } else { - elemKeys.keys.add(key.toString()); - } - } - return elemKeys; - } - - public static final Id getIdValue(HugeType type, Object idValue) { - assert type.isGraph(); - Id id = getIdValue(idValue); - if (type.isVertex()) { - return id; - } else { - if (id == null || id instanceof EdgeId) { - return id; - } - return EdgeId.parse(id.asString()); - } - } - - @Watched(prefix = "element") - protected static Id getIdValue(Object idValue) { - if (idValue == null) { - return null; - } - - if (idValue instanceof String) { - // String id - return IdGenerator.of((String) idValue); - } else if (idValue instanceof Number) { - // Long id - return IdGenerator.of(((Number) idValue).longValue()); - } else if (idValue instanceof UUID) { - // UUID id - return IdGenerator.of((UUID) idValue); - } else if (idValue instanceof Id) { - // Id itself - return (Id) idValue; - } else if (idValue instanceof Element) { - // Element - return (Id) ((Element) idValue).id(); - } - - // Throw if error type - throw new UnsupportedOperationException(String.format( - "Invalid element id: %s(%s)", - idValue, idValue.getClass().getSimpleName())); - } - - @Watched(prefix = "element") - public static final Object getLabelValue(Object... 
keyValues) { - Object labelValue = null; - for (int i = 0; i < keyValues.length; i = i + 2) { - if (keyValues[i].equals(T.label)) { - labelValue = keyValues[i + 1]; - if (labelValue instanceof String) { - ElementHelper.validateLabel((String) labelValue); - } - break; - } - } - return labelValue; - } - - public static int intFromId(Id id) { - E.checkArgument(id instanceof IdGenerator.LongId, - "Can't get number from %s(%s)", id, id.getClass()); - return ((IdGenerator.LongId) id).intValue(); - } - - public static final class ElementKeys { - - private Object label = null; - private Object id = null; - private Set keys = new HashSet<>(); - - public Object label() { - return this.label; - } - - public void label(Object label) { - this.label = label; - } - - public Object id() { - return this.id; - } - - public void id(Object id) { - this.id = id; - } - - public Set keys() { - return this.keys; - } - - public void keys(Set keys) { - this.keys = keys; - } - } -} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java deleted file mode 100644 index 02d02a51f..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/HugeVertex.java +++ /dev/null @@ -1,386 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.baidu.hugegraph.serializer.direct.struct; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - -import org.apache.logging.log4j.util.Strings; -import org.apache.tinkerpop.gremlin.structure.Direction; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.T; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.VertexProperty; -import org.glassfish.jersey.internal.guava.Sets; - -import com.baidu.hugegraph.backend.id.IdGenerator; -import com.baidu.hugegraph.backend.query.ConditionQuery; -import com.baidu.hugegraph.backend.query.QueryResults; -import com.baidu.hugegraph.config.CoreOptions; -import com.baidu.hugegraph.perf.PerfUtil.Watched; -import com.baidu.hugegraph.schema.PropertyKey; -import com.baidu.hugegraph.schema.VertexLabel; -import com.baidu.hugegraph.serializer.direct.BytesBuffer; -import com.baidu.hugegraph.serializer.direct.util.Id; -import com.baidu.hugegraph.serializer.direct.util.SplicingIdGenerator; -import com.baidu.hugegraph.serializer.direct.util.HugeException; -import com.baidu.hugegraph.type.define.Cardinality; -import com.baidu.hugegraph.type.define.Directions; -import com.baidu.hugegraph.type.define.IdStrategy; -import com.baidu.hugegraph.util.E; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; - -/** - * We need a simple vertex 
struct for direct encode - * 1. id (only support number + string type) & treat as bytes - * 2. property, save a map - * - * So how could we deal with multi situation? (we judge before construct it), like: - * 1. idStrategy should be set / check before load - * 2. edges should do only in HugeEdge - * 3. any transaction actions should be ignored - **/ -public class HugeVertex extends HugeElement implements Vertex, Cloneable { - - private static final List EMPTY_LIST = ImmutableList.of(); - - private Id id; - protected Collection edges; - - public HugeVertex(Id id, String label) { - - E.checkArgumentNotNull(label, "Vertex label can't be null"); - this.label = label; - - this.id = id; - this.edges = EMPTY_LIST; - if (this.id != null) { - this.checkIdLength(); - } - } - - - @Override - public Id id() { - return this.id; - } - - @Watched(prefix = "vertex") - public void assignId(Id id) { - IdStrategy strategy = this.label.idStrategy(); - // Generate an id and assign - switch (strategy) { - case CUSTOMIZE_STRING: - assert !id.number(); - this.id = id; - break; - case CUSTOMIZE_NUMBER: - assert id.number(); - this.id = id; - break; - case CUSTOMIZE_UUID: - this.id = id.uuid() ? 
id : IdGenerator.of(id.asString(), true); - break; - case PRIMARY_KEY: - this.id = SplicingIdGenerator.instance().generate(this); - break; - default: - throw new HugeException("Unknown id strategy" + strategy); - } - this.checkIdLength(); - } - - protected void checkIdLength() { - assert this.id != null; - int len = this.id.asBytes().length; - if (len > BytesBuffer.ID_LEN_MAX) { - throw new HugeException("The max length of vertex id is 128, but got" + len); - } - - } - - public void correctVertexLabel(VertexLabel correctLabel) { - E.checkArgumentNotNull(correctLabel, "Vertex label can't be null"); - if (this.label != null && !this.label.undefined() && - !correctLabel.undefined()) { - E.checkArgument(this.label.equals(correctLabel), - "Vertex label can't be changed from '%s' to '%s'", - this.label, correctLabel); - } - this.label = correctLabel; - } - - @Watched(prefix = "vertex") - protected List primaryValues() { - E.checkArgument(this.label.idStrategy() == IdStrategy.PRIMARY_KEY, - "The id strategy '%s' don't have primary keys", - this.label.idStrategy()); - List primaryKeys = this.label.primaryKeys(); - E.checkArgument(!primaryKeys.isEmpty(), - "Primary key can't be empty for id strategy '%s'", - IdStrategy.PRIMARY_KEY); - - boolean encodeNumber = this.graph() - .option(CoreOptions.VERTEX_ENCODE_PK_NUMBER); - List propValues = new ArrayList<>(primaryKeys.size()); - for (Id pk : primaryKeys) { - HugeProperty property = this.getProperty(pk); - E.checkState(property != null, - "The value of primary key '%s' can't be null", - this.graph().propertyKey(pk).name()); - Object propValue = property.serialValue(encodeNumber); - if (Strings.EMPTY.equals(propValue)) { - propValue = ConditionQuery.INDEX_VALUE_EMPTY; - } - propValues.add(propValue); - } - return propValues; - } - - public boolean existsEdges() { - return this.edges.size() > 0; - } - - public Collection getEdges() { - return Collections.unmodifiableCollection(this.edges); - } - - public void addEdge(HugeEdge 
edge) { - if (this.edges == EMPTY_LIST) { - this.edges = newList(); - } - this.edges.add(edge); - } - - /** - * Add edge with direction OUT - * @param edge the out edge - */ - @Watched - public void addOutEdge(HugeEdge edge) { - if (edge.ownerVertex() == null) { - edge.sourceVertex(this); - } - E.checkState(edge.isDirection(Directions.OUT), - "The owner vertex('%s') of OUT edge '%s' should be '%s'", - edge.ownerVertex().id(), edge, this.id()); - this.addEdge(edge); - } - - /** - * Add edge with direction IN - * @param edge the in edge - */ - @Watched - public void addInEdge(HugeEdge edge) { - if (edge.ownerVertex() == null) { - edge.targetVertex(this); - } - E.checkState(edge.isDirection(Directions.IN), - "The owner vertex('%s') of IN edge '%s' should be '%s'", - edge.ownerVertex().id(), edge, this.id()); - this.addEdge(edge); - } - - public Iterator getEdges(Directions direction, String... edgeLabels) { - List list = new LinkedList<>(); - for (HugeEdge edge : this.edges) { - if (edge.matchDirection(direction) && - edge.belongToLabels(edgeLabels)) { - list.add(edge); - } - } - return list.iterator(); - } - - public Iterator getVertices(Directions direction, - String... edgeLabels) { - List list = new LinkedList<>(); - Iterator edges = this.getEdges(direction, edgeLabels); - while (edges.hasNext()) { - HugeEdge edge = (HugeEdge) edges.next(); - list.add(edge.otherVertex(this)); - } - return list.iterator(); - } - - @Watched(prefix = "vertex") - @Override - public Iterator vertices(Direction direction, - String... edgeLabels) { - Iterator edges = this.edges(direction, edgeLabels); - return this.graph().adjacentVertices(edges); - } - - @Watched(prefix = "vertex") - @Override - public VertexProperty property( - VertexProperty.Cardinality cardinality, - String key, V value, Object... 
objects) { - if (objects.length != 0 && objects[0].equals(T.id)) { - throw VertexProperty.Exceptions.userSuppliedIdsNotSupported(); - } - // TODO: extra props: objects - if (objects.length != 0) { - throw VertexProperty.Exceptions.metaPropertiesNotSupported(); - } - - PropertyKey propertyKey = this.graph().propertyKey(key); - /* - * g.AddV("xxx").property("key1", val1).property("key2", val2) - * g.AddV("xxx").property(single, "key1", val1) - * .property(list, "key2", val2) - * - * The cardinality single may be user supplied single, it may also be - * that user doesn't supplied cardinality, when it is latter situation, - * we shouldn't check it. Because of this reason, we are forced to - * give up the check of user supplied cardinality single. - * The cardinality not single must be user supplied, so should check it - */ - if (cardinality != VertexProperty.Cardinality.single) { - E.checkArgument(propertyKey.cardinality() == - Cardinality.convert(cardinality), - "Invalid cardinality '%s' for property key '%s', " + - "expect '%s'", cardinality, key, - propertyKey.cardinality().string()); - } - - // Check key in vertex label - E.checkArgument(VertexLabel.OLAP_VL.equals(this.label) || - this.label.properties().contains(propertyKey.id()), - "Invalid property '%s' for vertex label '%s'", - key, this.label); - // Primary-Keys can only be set once - if (this.schemaLabel().primaryKeys().contains(propertyKey.id())) { - E.checkArgument(!this.hasProperty(propertyKey.id()), - "Can't update primary key: '%s'", key); - } - - @SuppressWarnings("unchecked") - VertexProperty prop = (VertexProperty) this.addProperty( - propertyKey, value, !this.fresh()); - return prop; - } - - @Watched(prefix = "vertex") - @Override - protected HugeVertexProperty newProperty(PropertyKey pkey, V val) { - return new HugeVertexProperty<>(this, pkey, val); - } - - @Watched(prefix = "vertex") - @Override - protected boolean ensureFilledProperties(boolean throwIfNotExist) { - if (this.isPropLoaded()) { - 
this.updateToDefaultValueIfNone(); - return true; - } - - // Skip query if there is no any property key in schema - if (this.schemaLabel().properties().isEmpty()) { - this.propLoaded(); - return true; - } - - // NOTE: only adjacent vertex will reach here - Iterator vertices = this.graph().adjacentVertex(this.id()); - HugeVertex vertex = (HugeVertex) QueryResults.one(vertices); - if (vertex == null && !throwIfNotExist) { - return false; - } - E.checkState(vertex != null, "Vertex '%s' does not exist", this.id); - if (vertex.schemaLabel().undefined() || - !vertex.schemaLabel().equals(this.schemaLabel())) { - // Update vertex label of dangling edge to undefined - this.correctVertexLabel(VertexLabel.undefined(this.graph())); - vertex.resetProperties(); - } - this.copyProperties(vertex); - this.updateToDefaultValueIfNone(); - return true; - } - - @Watched(prefix = "vertex") - @SuppressWarnings("unchecked") // (VertexProperty) prop - @Override - public Iterator> properties(String... keys) { - // TODO: Compatible with TinkerPop properties() (HugeGraph-742) - this.ensureFilledProperties(true); - - // Capacity should be about the following size - int propsCapacity = keys.length == 0 ? 
- this.sizeOfProperties() : - keys.length; - List> props = new ArrayList<>(propsCapacity); - - if (keys.length == 0) { - for (HugeProperty prop : this.getProperties()) { - assert prop instanceof VertexProperty; - props.add((VertexProperty) prop); - } - } else { - for (String key : keys) { - Id pkeyId; - try { - pkeyId = this.graph().propertyKey(key).id(); - } catch (IllegalArgumentException ignored) { - continue; - } - HugeProperty prop = this.getProperty(pkeyId); - if (prop == null) { - // Not found - continue; - } - assert prop instanceof VertexProperty; - props.add((VertexProperty) prop); - } - } - - return props.iterator(); - } - - public boolean valid() { - try { - return this.ensureFilledProperties(false); - } catch (Throwable e) { - // Generally the program can't get here - return false; - } - } - - public static final Id getIdValue(Object idValue) { - return HugeElement.getIdValue(idValue); - } - - // we don't use origin sets now - private static Set newSet() { - return Sets.newHashSet(); - } - - private static List newList() { - return Lists.newArrayList(); - } -} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java deleted file mode 100644 index b388eb9f0..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/backup/OldSerializer.java +++ /dev/null @@ -1,1294 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.baidu.hugegraph.serializer.direct.backup; - -import static com.baidu.hugegraph.serializer.direct.util.Id.*; -import static com.baidu.hugegraph.serializer.direct.util.Id.IdType.*; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.apache.commons.lang.NotImplementedException; - -import com.baidu.hugegraph.HugeGraph; -import com.baidu.hugegraph.backend.BackendException; -import com.baidu.hugegraph.backend.id.EdgeId; -import com.baidu.hugegraph.backend.id.Id; -import com.baidu.hugegraph.backend.id.IdGenerator; -import com.baidu.hugegraph.backend.page.PageState; -import com.baidu.hugegraph.backend.query.Condition; -import com.baidu.hugegraph.backend.query.Condition.RangeConditions; -import com.baidu.hugegraph.backend.query.ConditionQuery; -import com.baidu.hugegraph.backend.query.IdPrefixQuery; -import com.baidu.hugegraph.backend.query.IdRangeQuery; -import com.baidu.hugegraph.backend.query.Query; -import com.baidu.hugegraph.backend.serializer.BinaryBackendEntry.BinaryId; -import com.baidu.hugegraph.backend.store.BackendEntry; -import com.baidu.hugegraph.backend.store.BackendEntry.BackendColumn; -import com.baidu.hugegraph.config.HugeConfig; -import com.baidu.hugegraph.schema.EdgeLabel; -import com.baidu.hugegraph.schema.IndexLabel; -import com.baidu.hugegraph.schema.PropertyKey; -import com.baidu.hugegraph.schema.SchemaElement; -import com.baidu.hugegraph.schema.VertexLabel; -import 
com.baidu.hugegraph.serializer.direct.BinaryBackendEntry; -import com.baidu.hugegraph.serializer.direct.BytesBuffer; -import com.baidu.hugegraph.serializer.direct.DataType; -import com.baidu.hugegraph.serializer.direct.HugeType; -import com.baidu.hugegraph.serializer.direct.struct.HugeEdge; -import com.baidu.hugegraph.serializer.direct.struct.HugeElement; -import com.baidu.hugegraph.serializer.direct.struct.HugeVertex; -import com.baidu.hugegraph.serializer.direct.util.Id; -import com.baidu.hugegraph.serializer.direct.util.StringEncoding; -import com.baidu.hugegraph.structure.HugeEdge; -import com.baidu.hugegraph.structure.HugeEdgeProperty; -import com.baidu.hugegraph.structure.HugeElement; -import com.baidu.hugegraph.structure.HugeIndex; -import com.baidu.hugegraph.structure.HugeProperty; -import com.baidu.hugegraph.structure.HugeVertex; -import com.baidu.hugegraph.structure.HugeVertexProperty; -import com.baidu.hugegraph.type.HugeType; -import com.baidu.hugegraph.type.define.AggregateType; -import com.baidu.hugegraph.type.define.Cardinality; -import com.baidu.hugegraph.type.define.Directions; -import com.baidu.hugegraph.type.define.Frequency; -import com.baidu.hugegraph.type.define.HugeKeys; -import com.baidu.hugegraph.type.define.IdStrategy; -import com.baidu.hugegraph.type.define.IndexType; -import com.baidu.hugegraph.type.define.SchemaStatus; -import com.baidu.hugegraph.type.define.WriteType; -import com.baidu.hugegraph.util.Bytes; -import com.baidu.hugegraph.util.E; -import com.baidu.hugegraph.util.JsonUtil; -import com.baidu.hugegraph.util.NumericUtil; - -import sun.plugin2.message.AbstractSerializer; - -public class OldSerializer extends AbstractSerializer { - - /* - * Id is stored in column name if keyWithIdPrefix=true like RocksDB, - * else stored in rowkey like HBase. 
- */ - private final boolean keyWithIdPrefix; - private final boolean indexWithIdPrefix; - private final boolean enablePartition; - - public OldSerializer() { - this(true, true, false); - } - - public OldSerializer(HugeConfig config) { - this(true, true, false); - } - - public OldSerializer(boolean keyWithIdPrefix, - boolean indexWithIdPrefix, - boolean enablePartition) { - this.keyWithIdPrefix = keyWithIdPrefix; - this.indexWithIdPrefix = indexWithIdPrefix; - this.enablePartition = enablePartition; - } - - @Override - protected BinaryBackendEntry newBackendEntry(HugeType type, Id id) { - if (type.isVertex()) { - BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + id.length()); - writePartitionedId(HugeType.VERTEX, id, buffer); - return new BinaryBackendEntry(type, new BinaryId(buffer.bytes(), id)); - } - - if (type.isEdge()) { - E.checkState(id instanceof BinaryId, - "Expect a BinaryId for BackendEntry with edge id"); - return new BinaryBackendEntry(type, (BinaryId) id); - } - - if (type.isIndex()) { - BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); - byte[] idBytes = buffer.writeIndexId(id, type).bytes(); - return new BinaryBackendEntry(type, new BinaryId(idBytes, id)); - } - - BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); - byte[] idBytes = buffer.writeId(id).bytes(); - return new BinaryBackendEntry(type, new BinaryId(idBytes, id)); - } - - protected final BinaryBackendEntry newBackendEntry(HugeVertex vertex) { - return newBackendEntry(vertex.type(), vertex.id()); - } - - protected final BinaryBackendEntry newBackendEntry(HugeEdge edge) { - BinaryId id = writeEdgeId(edge.idWithDirection()); - return newBackendEntry(edge.type(), id); - } - - protected final BinaryBackendEntry newBackendEntry(SchemaElement elem) { - return newBackendEntry(elem.type(), elem.id()); - } - - protected byte[] formatSyspropName(Id id, HugeKeys col) { - int idLen = this.keyWithIdPrefix ? 
1 + id.length() : 0; - BytesBuffer buffer = BytesBuffer.allocate(idLen + 1 + 1); - byte sysprop = HugeType.SYS_PROPERTY.code(); - if (this.keyWithIdPrefix) { - buffer.writeId(id); - } - return buffer.write(sysprop).write(col.code()).bytes(); - } - - protected byte[] formatSyspropName(BinaryId id, HugeKeys col) { - int idLen = this.keyWithIdPrefix ? id.length() : 0; - BytesBuffer buffer = BytesBuffer.allocate(idLen + 1 + 1); - byte sysprop = HugeType.SYS_PROPERTY.code(); - if (this.keyWithIdPrefix) { - buffer.write(id.asBytes()); - } - return buffer.write(sysprop).write(col.code()).bytes(); - } - - protected BackendColumn formatLabel(HugeElement elem) { - BackendColumn col = new BackendColumn(); - col.name = this.formatSyspropName(elem.id(), HugeKeys.LABEL); - Id label = elem.schemaLabel().id(); - BytesBuffer buffer = BytesBuffer.allocate(label.length() + 1); - col.value = buffer.writeId(label).bytes(); - return col; - } - - protected byte[] formatPropertyName(HugeProperty prop) { - Id id = prop.element().id(); - int idLen = this.keyWithIdPrefix ? 
1 + id.length() : 0; - Id pkeyId = prop.propertyKey().id(); - BytesBuffer buffer = BytesBuffer.allocate(idLen + 2 + pkeyId.length()); - if (this.keyWithIdPrefix) { - buffer.writeId(id); - } - buffer.write(prop.type().code()); - buffer.writeId(pkeyId); - return buffer.bytes(); - } - - protected BackendColumn formatProperty(HugeProperty prop) { - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_PROPERTY); - buffer.writeProperty(prop.propertyKey(), prop.value()); - return BackendColumn.of(this.formatPropertyName(prop), buffer.bytes()); - } - - protected void parseProperty(Id pkeyId, BytesBuffer buffer, - HugeElement owner) { - PropertyKey pkey = owner.graph().propertyKey(pkeyId); - - // Parse value - Object value = buffer.readProperty(pkey); - - // Set properties of vertex/edge - if (pkey.cardinality() == Cardinality.SINGLE) { - owner.addProperty(pkey, value); - } else { - if (!(value instanceof Collection)) { - throw new BackendException( - "Invalid value of non-single property: %s", value); - } - owner.addProperty(pkey, value); - } - } - - protected void formatProperties(Collection> props, - BytesBuffer buffer) { - // Write properties size - buffer.writeVInt(props.size()); - - // Write properties data - for (HugeProperty property : props) { - PropertyKey pkey = property.propertyKey(); - buffer.writeVInt(SchemaElement.schemaId(pkey.id())); - buffer.writeProperty(pkey, property.value()); - } - } - - protected void parseProperties(BytesBuffer buffer, HugeElement owner) { - int size = buffer.readVInt(); - assert size >= 0; - for (int i = 0; i < size; i++) { - Id pkeyId = IdGenerator.of(buffer.readVInt()); - this.parseProperty(pkeyId, buffer, owner); - } - } - - protected void formatExpiredTime(long expiredTime, BytesBuffer buffer) { - buffer.writeVLong(expiredTime); - } - - protected void parseExpiredTime(BytesBuffer buffer, HugeElement element) { - element.expiredTime(buffer.readVLong()); - } - - protected byte[] formatEdgeValue(HugeEdge edge) { - int 
propsCount = edge.sizeOfProperties(); - BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount); - - // Write edge id - //buffer.writeId(edge.id()); - - // Write edge properties - this.formatProperties(edge.getProperties(), buffer); - - // Write edge expired time if needed - if (edge.hasTtl()) { - this.formatExpiredTime(edge.expiredTime(), buffer); - } - - return buffer.bytes(); - } - - protected void parseEdge(BackendColumn col, HugeVertex vertex, - HugeGraph graph) { - // owner-vertex + dir + edge-label + sort-values + other-vertex - - BytesBuffer buffer = BytesBuffer.wrap(col.name); - if (this.keyWithIdPrefix) { - // Consume owner-vertex id - buffer.readId(); - } - byte type = buffer.read(); - Id labelId = buffer.readId(); - String sortValues = buffer.readStringWithEnding(); - Id otherVertexId = buffer.readId(); - - boolean direction = EdgeId.isOutDirectionFromCode(type); - EdgeLabel edgeLabel = graph.edgeLabelOrNone(labelId); - - // Construct edge - HugeEdge edge = HugeEdge.constructEdge(vertex, direction, edgeLabel, - sortValues, otherVertexId); - - // Parse edge-id + edge-properties - buffer = BytesBuffer.wrap(col.value); - - //Id id = buffer.readId(); - - // Parse edge properties - this.parseProperties(buffer, edge); - - // Parse edge expired time if needed - if (edge.hasTtl()) { - this.parseExpiredTime(buffer, edge); - } - } - - protected void parseVertex(byte[] value, HugeVertex vertex) { - BytesBuffer buffer = BytesBuffer.wrap(value); - - // Parse vertex label - VertexLabel label = vertex.graph().vertexLabelOrNone(buffer.readId()); - vertex.correctVertexLabel(label); - - // Parse properties - this.parseProperties(buffer, vertex); - - // Parse vertex expired time if needed - if (vertex.hasTtl()) { - this.parseExpiredTime(buffer, vertex); - } - } - - protected void parseColumn(BackendColumn col, HugeVertex vertex) { - BytesBuffer buffer = BytesBuffer.wrap(col.name); - Id id = this.keyWithIdPrefix ? 
buffer.readId() : vertex.id(); - E.checkState(buffer.remaining() > 0, "Missing column type"); - byte type = buffer.read(); - // Parse property - if (type == HugeType.PROPERTY.code()) { - Id pkeyId = buffer.readId(); - this.parseProperty(pkeyId, BytesBuffer.wrap(col.value), vertex); - } - // Parse edge - else if (type == HugeType.EDGE_IN.code() || - type == HugeType.EDGE_OUT.code()) { - this.parseEdge(col, vertex, vertex.graph()); - } - // Parse system property - else if (type == HugeType.SYS_PROPERTY.code()) { - // pass - } - // Invalid entry - else { - E.checkState(false, "Invalid entry(%s) with unknown type(%s): 0x%s", - id, type & 0xff, Bytes.toHex(col.name)); - } - } - - protected byte[] formatIndexName(HugeIndex index) { - BytesBuffer buffer; - Id elemId = index.elementId(); - if (!this.indexWithIdPrefix) { - int idLen = 1 + elemId.length(); - buffer = BytesBuffer.allocate(idLen); - } else { - Id indexId = index.id(); - HugeType type = index.type(); - if (!type.isNumericIndex() && indexIdLengthExceedLimit(indexId)) { - indexId = index.hashId(); - } - int idLen = 1 + elemId.length() + 1 + indexId.length(); - buffer = BytesBuffer.allocate(idLen); - // Write index-id - buffer.writeIndexId(indexId, type); - } - // Write element-id - buffer.writeId(elemId); - // Write expired time if needed - if (index.hasTtl()) { - buffer.writeVLong(index.expiredTime()); - } - - return buffer.bytes(); - } - - protected void parseIndexName(HugeGraph graph, ConditionQuery query, - BinaryBackendEntry entry, - HugeIndex index, Object fieldValues) { - for (BackendColumn col : entry.columns()) { - if (indexFieldValuesUnmatched(col.value, fieldValues)) { - // Skip if field-values is not matched (just the same hash) - continue; - } - BytesBuffer buffer = BytesBuffer.wrap(col.name); - if (this.indexWithIdPrefix) { - buffer.readIndexId(index.type()); - } - Id elemId = buffer.readId(); - long expiredTime = index.hasTtl() ? 
buffer.readVLong() : 0L; - index.elementIds(elemId, expiredTime); - } - } - - @Override - public BackendEntry writeVertex(HugeVertex vertex) { - if (vertex.olap()) { - return this.writeOlapVertex(vertex); - } - - BinaryBackendEntry entry = newBackendEntry(vertex); - - if (vertex.removed()) { - return entry; - } - - int propsCount = vertex.sizeOfProperties(); - BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount); - - // Write vertex label - buffer.writeId(vertex.schemaLabel().id()); - - // Write all properties of the vertex - this.formatProperties(vertex.getProperties(), buffer); - - // Write vertex expired time if needed - if (vertex.hasTtl()) { - entry.ttl(vertex.ttl()); - this.formatExpiredTime(vertex.expiredTime(), buffer); - } - - // Fill column - byte[] name = this.keyWithIdPrefix ? - entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; - entry.column(name, buffer.bytes()); - - return entry; - } - - @Override - public BackendEntry writeOlapVertex(HugeVertex vertex) { - BinaryBackendEntry entry = newBackendEntry(HugeType.OLAP, vertex.id()); - BytesBuffer buffer = BytesBuffer.allocate(8 + 16); - - Collection> properties = vertex.getProperties(); - if (properties.size() != 1) { - E.checkArgument(false, - "Expect 1 property for olap vertex, but got %s", - properties.size()); - } - HugeProperty property = properties.iterator().next(); - PropertyKey propertyKey = property.propertyKey(); - buffer.writeVInt(SchemaElement.schemaId(propertyKey.id())); - buffer.writeProperty(propertyKey, property.value()); - - // Fill column - byte[] name = this.keyWithIdPrefix ? 
- entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; - entry.column(name, buffer.bytes()); - entry.subId(propertyKey.id()); - entry.olap(true); - return entry; - } - - @Override - public BackendEntry writeVertexProperty(HugeVertexProperty prop) { - throw new NotImplementedException("Unsupported writeVertexProperty()"); - } - - @Override - public HugeVertex readVertex(HugeGraph graph, BackendEntry bytesEntry) { - if (bytesEntry == null) { - return null; - } - BinaryBackendEntry entry = this.convertEntry(bytesEntry); - - // Parse id - Id id = entry.id().origin(); - Id vid = id.edge() ? ((EdgeId) id).ownerVertexId() : id; - HugeVertex vertex = new HugeVertex(graph, vid, VertexLabel.NONE); - - // Parse all properties and edges of a Vertex - Iterator iterator = entry.columns().iterator(); - for (int index = 0; iterator.hasNext(); index++) { - BackendColumn col = iterator.next(); - if (entry.type().isEdge()) { - // NOTE: the entry id type is vertex even if entry type is edge - // Parse vertex edges - this.parseColumn(col, vertex); - } else { - assert entry.type().isVertex(); - // Parse vertex properties - assert entry.columnsSize() >= 1 : entry.columnsSize(); - if (index == 0) { - this.parseVertex(col.value, vertex); - } else { - this.parseVertexOlap(col.value, vertex); - } - } - } - - return vertex; - } - - protected void parseVertexOlap(byte[] value, HugeVertex vertex) { - BytesBuffer buffer = BytesBuffer.wrap(value); - Id pkeyId = IdGenerator.of(buffer.readVInt()); - this.parseProperty(pkeyId, buffer, vertex); - } - - @Override - public BackendEntry writeEdge(HugeEdge edge) { - BinaryBackendEntry entry = newBackendEntry(edge); - byte[] name = this.keyWithIdPrefix ? 
- entry.id().asBytes() : BytesBuffer.BYTES_EMPTY; - byte[] value = this.formatEdgeValue(edge); - entry.column(name, value); - - if (edge.hasTtl()) { - entry.ttl(edge.ttl()); - } - - return entry; - } - - @Override - public BackendEntry writeEdgeProperty(HugeEdgeProperty prop) { - // TODO: entry.column(this.formatProperty(prop)); - throw new NotImplementedException("Unsupported writeEdgeProperty()"); - } - - @Override - public HugeEdge readEdge(HugeGraph graph, BackendEntry bytesEntry) { - HugeVertex vertex = this.readVertex(graph, bytesEntry); - Collection edges = vertex.getEdges(); - if (edges.size() != 1) { - E.checkState(false, - "Expect 1 edge in vertex, but got %s", - edges.size()); - } - return edges.iterator().next(); - } - - @Override - public BackendEntry writeIndex(HugeIndex index) { - BinaryBackendEntry entry; - if (index.fieldValues() == null && index.elementIds().size() == 0) { - /* - * When field-values is null and elementIds size is 0, it is - * meaningful for deletion of index data by index label. 
- * TODO: improve - */ - entry = this.formatILDeletion(index); - } else { - Id id = index.id(); - HugeType type = index.type(); - byte[] value = null; - if (!type.isNumericIndex() && indexIdLengthExceedLimit(id)) { - id = index.hashId(); - // Save field-values as column value if the key is a hash string - value = StringEncoding.encode(index.fieldValues().toString()); - } - - entry = newBackendEntry(type, id); - if (index.indexLabel().olap()) { - entry.olap(true); - } - entry.column(this.formatIndexName(index), value); - entry.subId(index.elementId()); - - if (index.hasTtl()) { - entry.ttl(index.ttl()); - } - } - return entry; - } - - @Override - public HugeIndex readIndex(HugeGraph graph, ConditionQuery query, - BackendEntry bytesEntry) { - if (bytesEntry == null) { - return null; - } - - BinaryBackendEntry entry = this.convertEntry(bytesEntry); - // NOTE: index id without length prefix - byte[] bytes = entry.id().asBytes(); - HugeIndex index = HugeIndex.parseIndexId(graph, entry.type(), bytes); - - Object fieldValues = null; - if (!index.type().isRangeIndex()) { - fieldValues = query.condition(HugeKeys.FIELD_VALUES); - if (!index.fieldValues().equals(fieldValues)) { - // Update field-values for hashed or encoded index-id - index.fieldValues(fieldValues); - } - } - - this.parseIndexName(graph, query, entry, index, fieldValues); - return index; - } - - @Override - public BackendEntry writeId(HugeType type, Id id) { - return newBackendEntry(type, id); - } - - @Override - protected Id writeQueryId(HugeType type, Id id) { - if (type.isEdge()) { - id = writeEdgeId(id); - } else if (type.isVertex()) { - BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + id.length()); - writePartitionedId(HugeType.VERTEX, id, buffer); - id = new BinaryId(buffer.bytes(), id); - } else { - BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); - id = new BinaryId(buffer.writeId(id).bytes(), id); - } - return id; - } - - @Override - protected Query writeQueryEdgeCondition(Query query) { 
- ConditionQuery cq = (ConditionQuery) query; - if (cq.hasRangeCondition()) { - return this.writeQueryEdgeRangeCondition(cq); - } else { - return this.writeQueryEdgePrefixCondition(cq); - } - } - - private Query writeQueryEdgeRangeCondition(ConditionQuery cq) { - List sortValues = cq.syspropConditions(HugeKeys.SORT_VALUES); - E.checkArgument(sortValues.size() >= 1 && sortValues.size() <= 2, - "Edge range query must be with sort-values range"); - // Would ignore target vertex - Id vertex = cq.condition(HugeKeys.OWNER_VERTEX); - Directions direction = cq.condition(HugeKeys.DIRECTION); - if (direction == null) { - direction = Directions.OUT; - } - Id label = cq.condition(HugeKeys.LABEL); - - BytesBuffer start = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - writePartitionedId(HugeType.EDGE, vertex, start); - start.write(direction.type().code()); - start.writeId(label); - - BytesBuffer end = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - end.copyFrom(start); - - RangeConditions range = new RangeConditions(sortValues); - if (range.keyMin() != null) { - start.writeStringRaw((String) range.keyMin()); - } - if (range.keyMax() != null) { - end.writeStringRaw((String) range.keyMax()); - } - // Sort-value will be empty if there is no start sort-value - Id startId = new BinaryId(start.bytes(), null); - // Set endId as prefix if there is no end sort-value - Id endId = new BinaryId(end.bytes(), null); - - boolean includeStart = range.keyMinEq(); - if (cq.paging() && !cq.page().isEmpty()) { - includeStart = true; - byte[] position = PageState.fromString(cq.page()).position(); - E.checkArgument(Bytes.compare(position, startId.asBytes()) >= 0, - "Invalid page out of lower bound"); - startId = new BinaryId(position, null); - } - if (range.keyMax() == null) { - return new IdPrefixQuery(cq, startId, includeStart, endId); - } - return new IdRangeQuery(cq, startId, includeStart, endId, - range.keyMaxEq()); - } - - private Query writeQueryEdgePrefixCondition(ConditionQuery cq) { - 
int count = 0; - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - for (HugeKeys key : EdgeId.KEYS) { - Object value = cq.condition(key); - - if (value != null) { - count++; - } else { - if (key == HugeKeys.DIRECTION) { - // Direction is null, set to OUT - value = Directions.OUT; - } else { - break; - } - } - - if (key == HugeKeys.OWNER_VERTEX || - key == HugeKeys.OTHER_VERTEX) { - writePartitionedId(HugeType.EDGE, (Id) value, buffer); - } else if (key == HugeKeys.DIRECTION) { - byte t = ((Directions) value).type().code(); - buffer.write(t); - } else if (key == HugeKeys.LABEL) { - assert value instanceof Id; - buffer.writeId((Id) value); - } else if (key == HugeKeys.SORT_VALUES) { - assert value instanceof String; - buffer.writeStringWithEnding((String) value); - } else { - assert false : key; - } - } - - if (count > 0) { - assert count == cq.conditionsSize(); - return prefixQuery(cq, new BinaryId(buffer.bytes(), null)); - } - - return null; - } - - @Override - protected Query writeQueryCondition(Query query) { - HugeType type = query.resultType(); - if (!type.isIndex()) { - return query; - } - - ConditionQuery cq = (ConditionQuery) query; - - if (type.isNumericIndex()) { - // Convert range-index/shard-index query to id range query - return this.writeRangeIndexQuery(cq); - } else { - assert type.isSearchIndex() || type.isSecondaryIndex() || - type.isUniqueIndex(); - // Convert secondary-index or search-index query to id query - return this.writeStringIndexQuery(cq); - } - } - - private Query writeStringIndexQuery(ConditionQuery query) { - E.checkArgument(query.allSysprop() && - query.conditionsSize() == 2, - "There should be two conditions: " + - "INDEX_LABEL_ID and FIELD_VALUES" + - "in secondary index query"); - - Id index = query.condition(HugeKeys.INDEX_LABEL_ID); - Object key = query.condition(HugeKeys.FIELD_VALUES); - - E.checkArgument(index != null, "Please specify the index label"); - E.checkArgument(key != null, "Please specify the 
index key"); - - Id prefix = formatIndexId(query.resultType(), index, key, true); - return prefixQuery(query, prefix); - } - - private Query writeRangeIndexQuery(ConditionQuery query) { - Id index = query.condition(HugeKeys.INDEX_LABEL_ID); - E.checkArgument(index != null, "Please specify the index label"); - - List fields = query.syspropConditions(HugeKeys.FIELD_VALUES); - E.checkArgument(!fields.isEmpty(), - "Please specify the index field values"); - - HugeType type = query.resultType(); - Id start = null; - if (query.paging() && !query.page().isEmpty()) { - byte[] position = PageState.fromString(query.page()).position(); - start = new BinaryId(position, null); - } - - RangeConditions range = new RangeConditions(fields); - if (range.keyEq() != null) { - Id id = formatIndexId(type, index, range.keyEq(), true); - if (start == null) { - return new IdPrefixQuery(query, id); - } - E.checkArgument(Bytes.compare(start.asBytes(), id.asBytes()) >= 0, - "Invalid page out of lower bound"); - return new IdPrefixQuery(query, start, id); - } - - Object keyMin = range.keyMin(); - Object keyMax = range.keyMax(); - boolean keyMinEq = range.keyMinEq(); - boolean keyMaxEq = range.keyMaxEq(); - if (keyMin == null) { - E.checkArgument(keyMax != null, - "Please specify at least one condition"); - // Set keyMin to min value - keyMin = NumericUtil.minValueOf(keyMax.getClass()); - keyMinEq = true; - } - - Id min = formatIndexId(type, index, keyMin, false); - if (!keyMinEq) { - /* - * Increase 1 to keyMin, index GT query is a scan with GT prefix, - * inclusiveStart=false will also match index started with keyMin - */ - increaseOne(min.asBytes()); - keyMinEq = true; - } - - if (start == null) { - start = min; - } else { - E.checkArgument(Bytes.compare(start.asBytes(), min.asBytes()) >= 0, - "Invalid page out of lower bound"); - } - - if (keyMax == null) { - keyMax = NumericUtil.maxValueOf(keyMin.getClass()); - keyMaxEq = true; - } - Id max = formatIndexId(type, index, keyMax, false); - if 
(keyMaxEq) { - keyMaxEq = false; - increaseOne(max.asBytes()); - } - return new IdRangeQuery(query, start, keyMinEq, max, keyMaxEq); - } - - private BinaryId writeEdgeId(Id id) { - EdgeId edgeId; - if (id.type() == EDGE) { - edgeId = (EdgeId) id; - } else { - edgeId = EdgeId.parse(id.asString()); - } - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - if (this.enablePartition) { - buffer.writeShort(getPartition(HugeType.EDGE, edgeId.ownerVertexId())); - buffer.writeEdgeId(edgeId); - } else { - buffer.writeEdgeId(edgeId); - } - return new BinaryId(buffer.bytes(), id); - } - - private void writePartitionedId(HugeType type, Id id, BytesBuffer buffer) { - if (this.enablePartition) { - buffer.writeShort(getPartition(type, id)); - buffer.writeId(id); - } else { - buffer.writeId(id); - } - } - - protected short getPartition(HugeType type, Id id) { - return 0; - } - - public BackendEntry parse(BackendEntry originEntry) { - byte[] bytes = originEntry.id().asBytes(); - BinaryBackendEntry parsedEntry = new BinaryBackendEntry(originEntry.type(), bytes, - this.enablePartition); - if (this.enablePartition) { - bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length() + 2, bytes.length); - } else { - bytes = Arrays.copyOfRange(bytes, parsedEntry.id().length(), bytes.length); - } - BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID); - buffer.write(parsedEntry.id().asBytes()); - buffer.write(bytes); - parsedEntry = new BinaryBackendEntry(originEntry.type(), new BinaryId(buffer.bytes(), - BytesBuffer.wrap(buffer.bytes()).readEdgeId())); - - for (BackendEntry.BackendColumn col : originEntry.columns()) { - parsedEntry.column(buffer.bytes(), col.value); - } - return parsedEntry; - } - - private static Query prefixQuery(ConditionQuery query, Id prefix) { - Query newQuery; - if (query.paging() && !query.page().isEmpty()) { - /* - * If used paging and the page number is not empty, deserialize - * the page to id and use it as the starting row for this 
query - */ - byte[] position = PageState.fromString(query.page()).position(); - E.checkArgument(Bytes.compare(position, prefix.asBytes()) >= 0, - "Invalid page out of lower bound"); - BinaryId start = new BinaryId(position, null); - newQuery = new IdPrefixQuery(query, start, prefix); - } else { - newQuery = new IdPrefixQuery(query, prefix); - } - return newQuery; - } - - protected static BinaryId formatIndexId(HugeType type, Id indexLabel, - Object fieldValues, - boolean equal) { - boolean withEnding = type.isRangeIndex() || equal; - Id id = HugeIndex.formatIndexId(type, indexLabel, fieldValues); - if (!type.isNumericIndex() && indexIdLengthExceedLimit(id)) { - id = HugeIndex.formatIndexHashId(type, indexLabel, fieldValues); - } - BytesBuffer buffer = BytesBuffer.allocate(1 + id.length()); - byte[] idBytes = buffer.writeIndexId(id, type, withEnding).bytes(); - return new BinaryId(idBytes, id); - } - - protected static boolean indexIdLengthExceedLimit(Id id) { - return id.asBytes().length > BytesBuffer.INDEX_HASH_ID_THRESHOLD; - } - - protected static boolean indexFieldValuesUnmatched(byte[] value, - Object fieldValues) { - if (value != null && value.length > 0 && fieldValues != null) { - if (!StringEncoding.decode(value).equals(fieldValues)) { - return true; - } - } - return false; - } - - public static final byte[] increaseOne(byte[] bytes) { - final byte BYTE_MAX_VALUE = (byte) 0xff; - assert bytes.length > 0; - byte last = bytes[bytes.length - 1]; - if (last != BYTE_MAX_VALUE) { - bytes[bytes.length - 1] += 0x01; - } else { - // Process overflow (like [1, 255] => [2, 0]) - int i = bytes.length - 1; - for (; i > 0 && bytes[i] == BYTE_MAX_VALUE; --i) { - bytes[i] += 0x01; - } - if (bytes[i] == BYTE_MAX_VALUE) { - assert i == 0; - throw new BackendException("Unable to increase bytes: %s", - Bytes.toHex(bytes)); - } - bytes[i] += 0x01; - } - return bytes; - } - - @Override - public BackendEntry writeVertexLabel(VertexLabel vertexLabel) { - SchemaSerializer 
serializer = new SchemaSerializer(); - return serializer.writeVertexLabel(vertexLabel); - } - - @Override - public VertexLabel readVertexLabel(HugeGraph graph, - BackendEntry backendEntry) { - if (backendEntry == null) { - return null; - } - BinaryBackendEntry entry = this.convertEntry(backendEntry); - - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.readVertexLabel(graph, entry); - } - - @Override - public BackendEntry writeEdgeLabel(EdgeLabel edgeLabel) { - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.writeEdgeLabel(edgeLabel); - } - - @Override - public EdgeLabel readEdgeLabel(HugeGraph graph, BackendEntry backendEntry) { - if (backendEntry == null) { - return null; - } - BinaryBackendEntry entry = this.convertEntry(backendEntry); - - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.readEdgeLabel(graph, entry); - } - - @Override - public BackendEntry writePropertyKey(PropertyKey propertyKey) { - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.writePropertyKey(propertyKey); - } - - @Override - public PropertyKey readPropertyKey(HugeGraph graph, - BackendEntry backendEntry) { - if (backendEntry == null) { - return null; - } - BinaryBackendEntry entry = this.convertEntry(backendEntry); - - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.readPropertyKey(graph, entry); - } - - @Override - public BackendEntry writeIndexLabel(IndexLabel indexLabel) { - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.writeIndexLabel(indexLabel); - } - - @Override - public IndexLabel readIndexLabel(HugeGraph graph, - BackendEntry backendEntry) { - if (backendEntry == null) { - return null; - } - BinaryBackendEntry entry = this.convertEntry(backendEntry); - - SchemaSerializer serializer = new SchemaSerializer(); - return serializer.readIndexLabel(graph, entry); - } - - private final class SchemaSerializer { - - private 
BinaryBackendEntry entry; - - public BinaryBackendEntry writeVertexLabel(VertexLabel schema) { - this.entry = newBackendEntry(schema); - writeString(HugeKeys.NAME, schema.name()); - writeEnum(HugeKeys.ID_STRATEGY, schema.idStrategy()); - writeIds(HugeKeys.PROPERTIES, schema.properties()); - writeIds(HugeKeys.PRIMARY_KEYS, schema.primaryKeys()); - writeIds(HugeKeys.NULLABLE_KEYS, schema.nullableKeys()); - writeIds(HugeKeys.INDEX_LABELS, schema.indexLabels()); - writeBool(HugeKeys.ENABLE_LABEL_INDEX, schema.enableLabelIndex()); - writeEnum(HugeKeys.STATUS, schema.status()); - writeLong(HugeKeys.TTL, schema.ttl()); - writeId(HugeKeys.TTL_START_TIME, schema.ttlStartTime()); - writeUserdata(schema); - return this.entry; - } - - public VertexLabel readVertexLabel(HugeGraph graph, - BinaryBackendEntry entry) { - E.checkNotNull(entry, "entry"); - this.entry = entry; - Id id = entry.id().origin(); - String name = readString(HugeKeys.NAME); - - VertexLabel vertexLabel = new VertexLabel(graph, id, name); - vertexLabel.idStrategy(readEnum(HugeKeys.ID_STRATEGY, - IdStrategy.class)); - vertexLabel.properties(readIds(HugeKeys.PROPERTIES)); - vertexLabel.primaryKeys(readIds(HugeKeys.PRIMARY_KEYS)); - vertexLabel.nullableKeys(readIds(HugeKeys.NULLABLE_KEYS)); - vertexLabel.addIndexLabels(readIds(HugeKeys.INDEX_LABELS)); - vertexLabel.enableLabelIndex(readBool(HugeKeys.ENABLE_LABEL_INDEX)); - vertexLabel.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); - vertexLabel.ttl(readLong(HugeKeys.TTL)); - vertexLabel.ttlStartTime(readId(HugeKeys.TTL_START_TIME)); - readUserdata(vertexLabel); - return vertexLabel; - } - - public BinaryBackendEntry writeEdgeLabel(EdgeLabel schema) { - this.entry = newBackendEntry(schema); - writeString(HugeKeys.NAME, schema.name()); - writeId(HugeKeys.SOURCE_LABEL, schema.sourceLabel()); - writeId(HugeKeys.TARGET_LABEL, schema.targetLabel()); - writeEnum(HugeKeys.FREQUENCY, schema.frequency()); - writeIds(HugeKeys.PROPERTIES, schema.properties()); - 
writeIds(HugeKeys.SORT_KEYS, schema.sortKeys()); - writeIds(HugeKeys.NULLABLE_KEYS, schema.nullableKeys()); - writeIds(HugeKeys.INDEX_LABELS, schema.indexLabels()); - writeBool(HugeKeys.ENABLE_LABEL_INDEX, schema.enableLabelIndex()); - writeEnum(HugeKeys.STATUS, schema.status()); - writeLong(HugeKeys.TTL, schema.ttl()); - writeId(HugeKeys.TTL_START_TIME, schema.ttlStartTime()); - writeUserdata(schema); - return this.entry; - } - - public EdgeLabel readEdgeLabel(HugeGraph graph, - BinaryBackendEntry entry) { - E.checkNotNull(entry, "entry"); - this.entry = entry; - Id id = entry.id().origin(); - String name = readString(HugeKeys.NAME); - - EdgeLabel edgeLabel = new EdgeLabel(graph, id, name); - edgeLabel.sourceLabel(readId(HugeKeys.SOURCE_LABEL)); - edgeLabel.targetLabel(readId(HugeKeys.TARGET_LABEL)); - edgeLabel.frequency(readEnum(HugeKeys.FREQUENCY, Frequency.class)); - edgeLabel.properties(readIds(HugeKeys.PROPERTIES)); - edgeLabel.sortKeys(readIds(HugeKeys.SORT_KEYS)); - edgeLabel.nullableKeys(readIds(HugeKeys.NULLABLE_KEYS)); - edgeLabel.addIndexLabels(readIds(HugeKeys.INDEX_LABELS)); - edgeLabel.enableLabelIndex(readBool(HugeKeys.ENABLE_LABEL_INDEX)); - edgeLabel.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); - edgeLabel.ttl(readLong(HugeKeys.TTL)); - edgeLabel.ttlStartTime(readId(HugeKeys.TTL_START_TIME)); - readUserdata(edgeLabel); - return edgeLabel; - } - - public BinaryBackendEntry writePropertyKey(PropertyKey schema) { - this.entry = newBackendEntry(schema); - writeString(HugeKeys.NAME, schema.name()); - writeEnum(HugeKeys.DATA_TYPE, schema.dataType()); - writeEnum(HugeKeys.CARDINALITY, schema.cardinality()); - writeEnum(HugeKeys.AGGREGATE_TYPE, schema.aggregateType()); - writeEnum(HugeKeys.WRITE_TYPE, schema.writeType()); - writeIds(HugeKeys.PROPERTIES, schema.properties()); - writeEnum(HugeKeys.STATUS, schema.status()); - writeUserdata(schema); - return this.entry; - } - - public PropertyKey readPropertyKey(HugeGraph graph, - 
BinaryBackendEntry entry) { - E.checkNotNull(entry, "entry"); - this.entry = entry; - Id id = entry.id().origin(); - String name = readString(HugeKeys.NAME); - - PropertyKey propertyKey = new PropertyKey(graph, id, name); - propertyKey.dataType(readEnum(HugeKeys.DATA_TYPE, DataType.class)); - propertyKey.cardinality(readEnum(HugeKeys.CARDINALITY, - Cardinality.class)); - propertyKey.aggregateType(readEnum(HugeKeys.AGGREGATE_TYPE, - AggregateType.class)); - propertyKey.writeType(readEnumOrDefault(HugeKeys.WRITE_TYPE, - WriteType.class, - WriteType.OLTP)); - propertyKey.properties(readIds(HugeKeys.PROPERTIES)); - propertyKey.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); - readUserdata(propertyKey); - return propertyKey; - } - - public BinaryBackendEntry writeIndexLabel(IndexLabel schema) { - this.entry = newBackendEntry(schema); - writeString(HugeKeys.NAME, schema.name()); - writeEnum(HugeKeys.BASE_TYPE, schema.baseType()); - writeId(HugeKeys.BASE_VALUE, schema.baseValue()); - writeEnum(HugeKeys.INDEX_TYPE, schema.indexType()); - writeIds(HugeKeys.FIELDS, schema.indexFields()); - writeEnum(HugeKeys.STATUS, schema.status()); - writeUserdata(schema); - return this.entry; - } - - public IndexLabel readIndexLabel(HugeGraph graph, - BinaryBackendEntry entry) { - E.checkNotNull(entry, "entry"); - this.entry = entry; - Id id = entry.id().origin(); - String name = readString(HugeKeys.NAME); - - IndexLabel indexLabel = new IndexLabel(graph, id, name); - indexLabel.baseType(readEnum(HugeKeys.BASE_TYPE, HugeType.class)); - indexLabel.baseValue(readId(HugeKeys.BASE_VALUE)); - indexLabel.indexType(readEnum(HugeKeys.INDEX_TYPE, - IndexType.class)); - indexLabel.indexFields(readIds(HugeKeys.FIELDS)); - indexLabel.status(readEnum(HugeKeys.STATUS, SchemaStatus.class)); - readUserdata(indexLabel); - return indexLabel; - } - - private void writeUserdata(SchemaElement schema) { - String userdataStr = JsonUtil.toJson(schema.userdata()); - writeString(HugeKeys.USER_DATA, 
userdataStr); - } - - private void readUserdata(SchemaElement schema) { - // Parse all user data of a schema element - byte[] userdataBytes = column(HugeKeys.USER_DATA); - String userdataStr = StringEncoding.decode(userdataBytes); - @SuppressWarnings("unchecked") - Map userdata = JsonUtil.fromJson(userdataStr, - Map.class); - for (Map.Entry e : userdata.entrySet()) { - schema.userdata(e.getKey(), e.getValue()); - } - } - - private void writeString(HugeKeys key, String value) { - this.entry.column(formatColumnName(key), - StringEncoding.encode(value)); - } - - private String readString(HugeKeys key) { - return StringEncoding.decode(column(key)); - } - - private void writeEnum(HugeKeys key, DataType value) { - this.entry.column(formatColumnName(key), new byte[]{value.code()}); - } - - private T readEnum(HugeKeys key, - Class clazz) { - byte[] value = column(key); - E.checkState(value.length == 1, - "The length of column '%s' must be 1, but is '%s'", - key, value.length); - return DataType.fromCode(clazz, value[0]); - } - - private T readEnumOrDefault(HugeKeys key, - Class clazz, - T defaultValue) { - BackendColumn column = this.entry.column(formatColumnName(key)); - if (column == null) { - return defaultValue; - } - E.checkNotNull(column.value, "column.value"); - return DataType.fromCode(clazz, column.value[0]); - } - - private void writeLong(HugeKeys key, long value) { - @SuppressWarnings("resource") - BytesBuffer buffer = new BytesBuffer(8); - buffer.writeVLong(value); - this.entry.column(formatColumnName(key), buffer.bytes()); - } - - private long readLong(HugeKeys key) { - byte[] value = column(key); - BytesBuffer buffer = BytesBuffer.wrap(value); - return buffer.readVLong(); - } - - private void writeId(HugeKeys key, Id value) { - this.entry.column(formatColumnName(key), writeId(value)); - } - - private Id readId(HugeKeys key) { - return readId(column(key)); - } - - private void writeIds(HugeKeys key, Collection value) { - 
this.entry.column(formatColumnName(key), writeIds(value)); - } - - private Id[] readIds(HugeKeys key) { - return readIds(column(key)); - } - - private void writeBool(HugeKeys key, boolean value) { - this.entry.column(formatColumnName(key), - new byte[]{(byte) (value ? 1 : 0)}); - } - - private boolean readBool(HugeKeys key) { - byte[] value = column(key); - E.checkState(value.length == 1, - "The length of column '%s' must be 1, but is '%s'", - key, value.length); - return value[0] != (byte) 0; - } - - private byte[] writeId(Id id) { - int size = 1 + id.length(); - BytesBuffer buffer = BytesBuffer.allocate(size); - buffer.writeId(id); - return buffer.bytes(); - } - - private Id readId(byte[] value) { - BytesBuffer buffer = BytesBuffer.wrap(value); - return buffer.readId(); - } - - private byte[] writeIds(Collection ids) { - E.checkState(ids.size() <= BytesBuffer.UINT16_MAX, - "The number of properties of vertex/edge label " + - "can't exceed '%s'", BytesBuffer.UINT16_MAX); - int size = 2; - for (Id id : ids) { - size += (1 + id.length()); - } - BytesBuffer buffer = BytesBuffer.allocate(size); - buffer.writeUInt16(ids.size()); - for (Id id : ids) { - buffer.writeId(id); - } - return buffer.bytes(); - } - - private Id[] readIds(byte[] value) { - BytesBuffer buffer = BytesBuffer.wrap(value); - int size = buffer.readUInt16(); - Id[] ids = new Id[size]; - for (int i = 0; i < size; i++) { - Id id = buffer.readId(); - ids[i] = id; - } - return ids; - } - - private byte[] column(HugeKeys key) { - BackendColumn column = this.entry.column(formatColumnName(key)); - E.checkState(column != null, "Not found key '%s' from entry %s", - key, this.entry); - E.checkNotNull(column.value, "column.value"); - return column.value; - } - - private byte[] formatColumnName(HugeKeys key) { - Id id = this.entry.id().origin(); - int size = 1 + id.length() + 1; - BytesBuffer buffer = BytesBuffer.allocate(size); - buffer.writeId(id); - buffer.write(key.code()); - return buffer.bytes(); - } - } -} 
diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java index 6571fbee6..1d29ea689 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java @@ -1,17 +1,24 @@ package com.baidu.hugegraph.serializer.direct.reuse; +import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Map; import com.baidu.hugegraph.driver.GraphManager; import com.baidu.hugegraph.driver.HugeClient; import com.baidu.hugegraph.driver.SchemaManager; -import com.baidu.hugegraph.serializer.direct.BinaryEntry; +import com.baidu.hugegraph.serializer.direct.HBaseSerializer; import com.baidu.hugegraph.serializer.direct.RocksDBSerializer; -import com.baidu.hugegraph.structure.GraphElement; import com.baidu.hugegraph.structure.graph.Edge; import com.baidu.hugegraph.structure.graph.Vertex; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.util.Bytes; /** * @author jin @@ -24,6 +31,7 @@ public class BytesDemo { static HugeClient client; boolean bypassServer = true; RocksDBSerializer ser; + HBaseSerializer HBaseSer; public static void main(String[] args) { BytesDemo ins = new BytesDemo(); @@ -31,59 +39,57 @@ public static void main(String[] args) { } void initGraph() { + int edgeLogicPartitions = 16; + int vertexLogicPartitions = 8; // If connect failed will throw an exception. 
- client = HugeClient.builder("http://localhost:8080", "hugegraph").build(); + client = HugeClient.builder("http://localhost:8081", "hugegraph").build(); SchemaManager schema = client.schema(); + schema.propertyKey("name").asText().ifNotExist().create(); schema.propertyKey("age").asInt().ifNotExist().create(); schema.propertyKey("lang").asText().ifNotExist().create(); - schema.propertyKey("date").asDate().ifNotExist().create(); - schema.propertyKey("price").asInt().ifNotExist().create(); - - schema.vertexLabel("person") - .properties("name", "age") - .primaryKeys("name") - .ifNotExist() - .create(); + schema.propertyKey("date").asText().ifNotExist().create(); + schema.propertyKey("price").asText().ifNotExist().create(); schema.vertexLabel("person") - .properties("price") - .nullableKeys("price") - .append(); + .properties("name", "age") + .useCustomizeStringId() + .enableLabelIndex(false) + .ifNotExist() + .create(); + + schema.vertexLabel("personB") + .properties("price") + .nullableKeys("price") + .useCustomizeNumberId() + .enableLabelIndex(false) + .ifNotExist() + .create(); schema.vertexLabel("software") - .properties("name", "lang", "price") - .primaryKeys("name") - .ifNotExist() - .create(); - - schema.indexLabel("softwareByPrice") - .onV("software").by("price") - .range() - .ifNotExist() - .create(); + .properties("name", "lang", "price") + .useCustomizeStringId() + .enableLabelIndex(false) + .ifNotExist() + .create(); schema.edgeLabel("knows") - .link("person", "person") - .properties("date") - .ifNotExist() - .create(); + .link("person", "person") + .properties("date") + .enableLabelIndex(false) + .ifNotExist() + .create(); schema.edgeLabel("created") - .link("person", "software") - .properties("date") - .ifNotExist() - .create(); - - schema.indexLabel("createdByDate") - .onE("created").by("date") - .secondary() - .ifNotExist() - .create(); - - ser = new RocksDBSerializer(client); + .link("person", "software") + .properties("date") + 
.enableLabelIndex(false) + .ifNotExist() + .create(); + + HBaseSer = new HBaseSerializer(client, vertexLogicPartitions, edgeLogicPartitions); writeGraphElements(); client.close(); @@ -91,39 +97,31 @@ void initGraph() { private void writeGraphElements() { GraphManager graph = client.graph(); - // construct some vertexes & edges - Vertex marko = new Vertex("person").property("name", "marko").property("age", 29); - Vertex vadas = new Vertex("person").property("name", "vadas").property("age", 27); - Vertex lop = new Vertex("software").property("name", "lop").property("lang", "java") - .property("price", 328); - Vertex josh = new Vertex("person").property("name", "josh").property("age", 32); - Vertex ripple = new Vertex("software").property("name", "ripple") - .property("lang", "java") - .property("price", 199); - Vertex peter = new Vertex("person").property("name", "peter").property("age", 35); - - Edge markoKnowsVadas = new Edge("knows").source(marko).target(vadas) - .property("date", "2016-01-10"); - Edge markoKnowsJosh = new Edge("knows").source(marko).target(josh) - .property("date", "2013-02-20"); - Edge markoCreateLop = new Edge("created").source(marko).target(lop) - .property("date", "2017-12-10"); - Edge joshCreateRipple = new Edge("created").source(josh).target(ripple) - .property("date", "2017-12-10"); - Edge joshCreateLop = new Edge("created").source(josh).target(lop) - .property("date", "2009-11-11"); - Edge peterCreateLop = new Edge("created").source(peter).target(lop) - .property("date", "2017-03-24"); + Vertex peter = new Vertex("person"); + peter.property("name", "peter"); + peter.property("age", 35); + peter.id("peter"); + + Vertex lop = new Vertex("software"); + lop.property("name", "lop"); + lop.property("lang", "java"); + lop.property("price", "328"); + lop.id("lop"); + + Vertex vadasB = new Vertex("personB"); + vadasB.property("price", "120"); + vadasB.id(12345); + + Edge peterCreateLop = new 
Edge("created").source(peter).target(lop).property("date", "2017-03-24"); List vertices = new ArrayList(){{ - add(marko);add(vadas);add(lop);add(josh);add(ripple);add(peter); + add(peter);add(lop);add(vadasB); }}; List edges = new ArrayList(){{ - add(markoKnowsVadas);add(markoKnowsJosh);add(markoCreateLop);add(joshCreateRipple); - add(joshCreateLop);add(peterCreateLop); + add(peterCreateLop); }}; // Old way: encode to json then send to server @@ -139,48 +137,18 @@ private void writeGraphElements() { * */ void writeDirectly(List vertices, List edges) { for (Vertex vertex : vertices) { - BinaryEntry entry = ser.writeVertex(vertex); - byte[] rowkey = getKeyBytes(vertex); - byte[] values = getValueBytes(vertex); - sendRpcToRocksDB(rowkey, values); + byte[] rowkey = HBaseSer.getKeyBytes(vertex); + byte[] values = HBaseSer.getValueBytes(vertex); + sendRpcToHBase("vertex", rowkey, values); } for (Edge edge: edges) { - byte[] rowkey = getKeyBytes(edge); - byte[] values = getValueBytes(edge); - sendRpcToRocksDB(rowkey, values); + byte[] rowkey = HBaseSer.getKeyBytes(edge); + byte[] values = HBaseSer.getValueBytes(edge); + sendRpcToHBase("edge", rowkey, values); } } - byte[] getKeyBytes(GraphElement e) { - Object id = e.id(); - String type = e.type(); - return id2Bytes(id, type); - } - - byte[] id2Bytes(Object id, String type) { - byte[] res = null; - - if ("vertex".equals(type)) { - ser.writeVertex() - } else if ("edge".equals(type)) { - - } - - return res; - } - - byte[] getValueBytes(GraphElement e) { - Map properties = e.properties(); - return propertyToBytes(properties); - } - - byte[] propertyToBytes(Map properties) { - byte[] res = null; - - return res; - } - boolean sendRpcToRocksDB(byte[] rowkey, byte[] values) { // here we call the rpc boolean flag = false; @@ -196,4 +164,42 @@ void writeByServer(GraphManager graph, List vertices, List edges) edges.forEach(System.out::println); } + boolean sendRpcToHBase(String type, byte[] rowkey, byte[] values) { + boolean flag = 
false; + try { + flag = put(type, rowkey, values); + } catch (IOException e) { + e.printStackTrace(); + } + return flag; + } + + + boolean put (String type, byte[] rowkey, byte[] values) throws IOException { + Configuration config = HBaseConfiguration.create(); + // Our json records sometimes are very big, we have + // disable the maxsize check on the keyvalue. + config.set("hbase.zookeeper.quorum", "localhost"); + config.set("hbase.zookeeper.property.clientPort", "2181"); + + + Connection conn = ConnectionFactory.createConnection(config); + Table htable = null ; + if (type.equals("vertex")) { + htable = conn.getTable(TableName.valueOf("hugegraph12p:g_v")); + } else if (type.equals("edge")) { + htable = conn.getTable(TableName.valueOf("hugegraph12p:g_oe")); + } + + Put put = new Put(rowkey); + put.addColumn(Bytes.toBytes("f"), + Bytes.toBytes(""), + values); + htable.put(put); + htable.close(); + + + return true; + } + } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/ElementBuilder.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/ElementBuilder.java deleted file mode 100644 index 74ba99f9d..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/struct/ElementBuilder.java +++ /dev/null @@ -1,625 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.baidu.hugegraph.serializer.direct.struct; - -import java.nio.ByteBuffer; -import java.nio.CharBuffer; -import java.nio.charset.CharsetEncoder; -import java.nio.charset.CoderResult; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; - -import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.collections.ListUtils; -import org.apache.commons.lang3.StringUtils; - -import com.baidu.hugegraph.driver.HugeClient; -import com.baidu.hugegraph.serializer.direct.util.DataTypeUtil; -import com.baidu.hugegraph.serializer.direct.util.GraphSchema; -import com.baidu.hugegraph.structure.GraphElement; -import com.baidu.hugegraph.structure.constant.IdStrategy; -import com.baidu.hugegraph.structure.graph.Vertex; -import com.baidu.hugegraph.structure.schema.EdgeLabel; -import com.baidu.hugegraph.structure.schema.PropertyKey; -import com.baidu.hugegraph.structure.schema.SchemaLabel; -import com.baidu.hugegraph.structure.schema.VertexLabel; -import com.baidu.hugegraph.util.E; -import com.baidu.hugegraph.util.LongEncoding; -import com.google.common.base.Charsets; -import com.google.common.collect.ImmutableList; - -public abstract class ElementBuilder { - - private final GraphSchema schema; - - // NOTE: CharsetEncoder is not thread safe - private final CharsetEncoder encoder; - private final ByteBuffer buffer; - - 
public ElementBuilder(HugeClient client) { - this.schema = new GraphSchema(client); - this.encoder = Charsets.UTF_8.newEncoder(); - this.buffer = ByteBuffer.allocate(128); - } - - public abstract List build(String[] names, Object[] values); - - public abstract SchemaLabel schemaLabel(); - - protected abstract Collection nonNullableKeys(); - - protected abstract boolean isIdField(String fieldName); - - @SuppressWarnings("unchecked") - protected Collection nonNullableKeys(SchemaLabel schemaLabel) { - return CollectionUtils.subtract(schemaLabel.properties(), - schemaLabel.nullableKeys()); - } - - protected VertexKVPairs newKVPairs(VertexLabel vertexLabel, - boolean unfold) { - IdStrategy idStrategy = vertexLabel.idStrategy(); - if (idStrategy.isCustomize()) { - if (unfold) { - return new VertexFlatIdKVPairs(vertexLabel); - } else { - return new VertexIdKVPairs(vertexLabel); - } - } else { - assert idStrategy.isPrimaryKey(); - if (unfold) { - return new VertexFlatPkKVPairs(vertexLabel); - } else { - return new VertexPkKVPairs(vertexLabel); - } - } - } - - protected void addProperty(GraphElement element, String key, Object value) { - this.addProperty(element, key, value, true); - } - - protected void addProperty(GraphElement element, String key, Object value, - boolean needConvert) { - if (needConvert) { - value = this.convertPropertyValue(key, value); - } - element.property(key, value); - } - - protected void addProperties(GraphElement element, - Map properties) { - for (Map.Entry entry : properties.entrySet()) { - String key = entry.getKey(); - Object value = entry.getValue(); - this.checkFieldValue(key, value); - value = this.convertPropertyValue(key, value); - - element.property(key, value); - } - } - - protected void checkNonNullableKeys(GraphElement element) { - Set keys = element.properties().keySet(); - // Check whether passed all non-null property - Collection requiredKeys = this.nonNullableKeys(); - if (!keys.containsAll(requiredKeys)) { - 
@SuppressWarnings("unchecked") - Collection missed = CollectionUtils.subtract(requiredKeys, - keys); - E.checkArgument(false, "All non-null property keys %s of '%s' " + - "must be setted, but missed keys %s", - requiredKeys, this.schemaLabel().name(), missed); - } - } - - protected PropertyKey getPropertyKey(String name) { - return this.schema.getPropertyKey(name); - } - - protected VertexLabel getVertexLabel(String name) { - return this.schema.getVertexLabel(name); - } - - protected EdgeLabel getEdgeLabel(String name) { - return this.schema.getEdgeLabel(name); - } - - protected Object mappingValue(String fieldName, Object fieldValue) { - if (this.mapping().mappingValues().isEmpty()) { - return fieldValue; - } - String fieldStrValue = String.valueOf(fieldValue); - return this.mapping().mappingValue(fieldName, fieldStrValue); - } - - private void customizeId(VertexLabel vertexLabel, Vertex vertex, - String idField, Object idValue) { - E.checkArgumentNotNull(idField, "The vertex id field can't be null"); - E.checkArgumentNotNull(idValue, "The vertex id value can't be null"); - IdStrategy idStrategy = vertexLabel.idStrategy(); - if (idStrategy.isCustomizeString()) { - String id = (String) idValue; - this.checkVertexIdLength(id); - vertex.id(id); - } else if (idStrategy.isCustomizeNumber()) { - Long id = DataTypeUtil.parseNumber(idField, idValue); - vertex.id(id); - } else { - assert idStrategy.isCustomizeUuid(); - UUID id = DataTypeUtil.parseUUID(idField, idValue); - vertex.id(id); - } - } - - private Object convertPropertyValue(String key, Object rawValue) { - PropertyKey propertyKey = this.getPropertyKey(key); - InputSource inputSource = this.struct.input(); - return DataTypeUtil.convert(rawValue, propertyKey, inputSource); - } - - private void checkFieldValue(String fieldName, Object fieldValue) { - if (this.mapping().mappingValues().isEmpty() || - !this.mapping().mappingValues().containsKey(fieldName)) { - return; - } - // NOTE: The nullable values has been 
filtered before this - E.checkArgument(fieldValue != null, "The field value can't be null"); - E.checkArgument(DataTypeUtil.isSimpleValue(fieldValue), - "The field value must be simple type, actual is '%s'", - fieldValue.getClass()); - } - - private boolean vertexIdEmpty(VertexLabel vertexLabel, Vertex vertex) { - IdStrategy idStrategy = vertexLabel.idStrategy(); - if (idStrategy.isCustomizeString()) { - Object vertexId = vertex.id(); - return vertexId == null || StringUtils.isEmpty((String) vertexId); - } - return false; - } - - private void checkPrimaryValuesValid(VertexLabel vertexLabel, - Object[] primaryValues) { - List primaryKeys = vertexLabel.primaryKeys(); - E.checkArgument(primaryKeys.size() == primaryValues.length, - "Missing some primary key values, expect %s, " + - "but only got %s for vertex label '%s'", - primaryKeys, Arrays.toString(primaryValues), - vertexLabel); - for (int i = 0; i < primaryKeys.size(); i++) { - E.checkArgument(primaryValues[i] != null, - "Make sure the value of the primary key '%s' is " + - "not empty, or check whether the headers or " + - "field_mapping are configured correctly", - primaryKeys.get(i)); - } - } - - private String spliceVertexId(VertexLabel vertexLabel, - Object... 
primaryValues) { - StringBuilder vertexId = new StringBuilder(); - StringBuilder vertexKeysId = new StringBuilder(); - for (int i = 0; i < primaryValues.length; i++) { - Object value = primaryValues[i]; - String pkValue; - if (value instanceof Number || value instanceof Date) { - pkValue = LongEncoding.encodeNumber(value); - } else { - pkValue = String.valueOf(value); - } - if (StringUtils.containsAny(pkValue, Constants.SEARCH_LIST)) { - pkValue = StringUtils.replaceEach(pkValue, - Constants.SEARCH_LIST, - Constants.TARGET_LIST); - } - vertexKeysId.append(pkValue); - vertexKeysId.append("!"); - } - vertexId.append(vertexLabel.id()).append(":").append(vertexKeysId); - vertexId.deleteCharAt(vertexId.length() - 1); - return vertexId.toString(); - } - - private void checkVertexIdLength(String id) { - this.encoder.reset(); - this.buffer.clear(); - CoderResult r = this.encoder.encode(CharBuffer.wrap(id.toCharArray()), - this.buffer, true); - E.checkArgument(r.isUnderflow(), - "The vertex id length exceeds limit %s : '%s'", - Constants.VERTEX_ID_LIMIT, id); - } - - private boolean isEmptyPkValue(Object pkValue) { - if (pkValue == null) { - return true; - } - if (pkValue instanceof String) { - String pkValueStr = (String) pkValue; - return pkValueStr.isEmpty(); - } - return false; - } - - public abstract class VertexKVPairs { - - public final VertexLabel vertexLabel; - // General properties - public Map properties; - - public VertexKVPairs(VertexLabel vertexLabel) { - this.vertexLabel = vertexLabel; - this.properties = null; - } - - public abstract void extractFromVertex(String[] names, - Object[] values); - - public abstract void extractFromEdge(String[] names, Object[] values, - int[] fieldIndexes); - - public abstract List buildVertices(boolean withProperty); - - public List splitField(String key, Object value) { - return DataTypeUtil.splitField(key, value, struct.input()); - } - } - - public class VertexIdKVPairs extends VertexKVPairs { - - // The idField(raw field), 
like: id - private String idField; - // The single idValue(mapped), like: A -> 1 - private Object idValue; - - public VertexIdKVPairs(VertexLabel vertexLabel) { - super(vertexLabel); - } - - @Override - public void extractFromVertex(String[] names, Object[] values) { - // General properties - this.properties = new HashMap<>(); - for (int i = 0; i < names.length; i++) { - String fieldName = names[i]; - Object fieldValue = values[i]; - if (!retainField(fieldName, fieldValue)) { - continue; - } - if (isIdField(fieldName)) { - this.idField = fieldName; - this.idValue = mappingValue(fieldName, fieldValue); - } else { - String key = mapping().mappingField(fieldName); - Object value = mappingValue(fieldName, fieldValue); - this.properties.put(key, value); - } - } - } - - @Override - public void extractFromEdge(String[] names, Object[] values, - int[] fieldIndexes) { - assert fieldIndexes.length == 1; - String fieldName = names[fieldIndexes[0]]; - Object fieldValue = values[fieldIndexes[0]]; - this.idField = fieldName; - this.idValue = mappingValue(fieldName, fieldValue); - } - - @Override - public List buildVertices(boolean withProperty) { - Vertex vertex = new Vertex(vertexLabel.name()); - customizeId(vertexLabel, vertex, this.idField, this.idValue); - if (vertexIdEmpty(vertexLabel, vertex)) { - return ImmutableList.of(); - } - if (withProperty) { - String key = mapping().mappingField(this.idField); - // The id field is also used as a general property - if (vertexLabel.properties().contains(key)) { - addProperty(vertex, key, this.idValue); - } - addProperties(vertex, this.properties); - checkNonNullableKeys(vertex); - } - return ImmutableList.of(vertex); - } - } - - public class VertexFlatIdKVPairs extends VertexKVPairs { - - // The idField(raw field), like: id - private String idField; - /* - * The multiple idValues(spilted and mapped) - * like: A|B|C -> [1,2,3] - */ - private List idValues; - - public VertexFlatIdKVPairs(VertexLabel vertexLabel) { - super(vertexLabel); 
- } - - @Override - public void extractFromVertex(String[] names, Object[] values) { - // General properties - this.properties = new HashMap<>(); - for (int i = 0; i < names.length; i++) { - String fieldName = names[i]; - Object fieldValue = values[i]; - if (!retainField(fieldName, fieldValue)) { - continue; - } - if (isIdField(fieldName)) { - this.idField = fieldName; - List rawIdValues = splitField(fieldName, - fieldValue); - this.idValues = rawIdValues.stream().map(rawIdValue -> { - return mappingValue(fieldName, rawIdValue); - }).collect(Collectors.toList()); - } else { - String key = mapping().mappingField(fieldName); - Object value = mappingValue(fieldName, fieldValue); - this.properties.put(key, value); - } - } - } - - @Override - public void extractFromEdge(String[] names, Object[] values, - int[] fieldIndexes) { - assert fieldIndexes.length == 1; - String fieldName = names[fieldIndexes[0]]; - Object fieldValue = values[fieldIndexes[0]]; - this.idField = fieldName; - List rawIdValues = splitField(fieldName, fieldValue); - this.idValues = rawIdValues.stream().map(rawIdValue -> { - return mappingValue(fieldName, rawIdValue); - }).collect(Collectors.toList()); - } - - @Override - public List buildVertices(boolean withProperty) { - List vertices = new ArrayList<>(this.idValues.size()); - for (Object idValue : this.idValues) { - Vertex vertex = new Vertex(vertexLabel.name()); - customizeId(vertexLabel, vertex, this.idField, idValue); - if (vertexIdEmpty(vertexLabel, vertex)) { - continue; - } - if (withProperty) { - String key = mapping().mappingField(this.idField); - // The id field is also used as a general property - if (vertexLabel.properties().contains(key)) { - addProperty(vertex, key, idValue); - } - addProperties(vertex, this.properties); - checkNonNullableKeys(vertex); - } - vertices.add(vertex); - } - return vertices; - } - } - - public class VertexPkKVPairs extends VertexKVPairs { - - /* - * The primary key names(mapped), allowed multiple - * like: 
[p_name,p_age] -> [name,age] - */ - private List pkNames; - /* - * The primary values(mapped), length is the same as pkNames - * like: [m,2] -> [marko,18] - */ - private Object[] pkValues; - - public VertexPkKVPairs(VertexLabel vertexLabel) { - super(vertexLabel); - } - - @Override - public void extractFromVertex(String[] names, Object[] values) { - List primaryKeys = this.vertexLabel.primaryKeys(); - this.pkNames = primaryKeys; - this.pkValues = new Object[primaryKeys.size()]; - // General properties - this.properties = new HashMap<>(); - for (int i = 0; i < names.length; i++) { - String fieldName = names[i]; - Object fieldValue = values[i]; - if (!retainField(fieldName, fieldValue)) { - continue; - } - String key = mapping().mappingField(fieldName); - if (primaryKeys.contains(key)) { - // Don't put priamry key/values into general properties - int index = primaryKeys.indexOf(key); - Object pkValue = mappingValue(fieldName, fieldValue); - this.pkValues[index] = pkValue; - } else { - Object value = mappingValue(fieldName, fieldValue); - this.properties.put(key, value); - } - } - } - - @Override - public void extractFromEdge(String[] names, Object[] values, - int[] fieldIndexes) { - this.pkNames = new ArrayList<>(fieldIndexes.length); - for (int fieldIndex : fieldIndexes) { - String fieldName = names[fieldIndex]; - String mappingField = mapping().mappingField(fieldName); - this.pkNames.add(mappingField); - } - List primaryKeys = this.vertexLabel.primaryKeys(); - E.checkArgument(ListUtils.isEqualList(this.pkNames, primaryKeys), - "Make sure the the primary key fields %s are " + - "not empty, or check whether the headers or " + - "field_mapping are configured correctly", - primaryKeys); - this.pkValues = new Object[this.pkNames.size()]; - for (int i = 0; i < fieldIndexes.length; i++) { - String fieldName = names[fieldIndexes[i]]; - Object fieldValue = values[fieldIndexes[i]]; - Object pkValue = mappingValue(fieldName, fieldValue); - this.pkValues[i] = pkValue; - } - } 
- - @Override - public List buildVertices(boolean withProperty) { - checkPrimaryValuesValid(vertexLabel, this.pkValues); - for (int i = 0; i < this.pkNames.size(); i++) { - if (isEmptyPkValue(this.pkValues[i])) { - return ImmutableList.of(); - } - Object pkValue = convertPropertyValue(this.pkNames.get(i), - this.pkValues[i]); - this.pkValues[i] = pkValue; - } - String id = spliceVertexId(vertexLabel, this.pkValues); - checkVertexIdLength(id); - - Vertex vertex = new Vertex(vertexLabel.name()); - // NOTE: withProperty is true means that parsing vertex - if (withProperty) { - for (int i = 0; i < this.pkNames.size(); i++) { - addProperty(vertex, this.pkNames.get(i), - this.pkValues[i], false); - } - addProperties(vertex, this.properties); - checkNonNullableKeys(vertex); - } - vertex.id(id); - return ImmutableList.of(vertex); - } - } - - public class VertexFlatPkKVPairs extends VertexKVPairs { - - /* - * The primary key name(mapped), must be single - * like: p_name -> name - */ - private String pkName; - /* - * The primary values(splited and mapped) - * like: m|v -> [marko,vadas] - */ - private List pkValues; - - public VertexFlatPkKVPairs(VertexLabel vertexLabel) { - super(vertexLabel); - } - - @Override - public void extractFromVertex(String[] names, Object[] values) { - List primaryKeys = vertexLabel.primaryKeys(); - E.checkArgument(primaryKeys.size() == 1, - "In case unfold is true, just supported " + - "a single primary key"); - this.pkName = primaryKeys.get(0); - // General properties - this.properties = new HashMap<>(); - boolean handledPk = false; - for (int i = 0; i < names.length; i++) { - String fieldName = names[i]; - Object fieldValue = values[i]; - if (!retainField(fieldName, fieldValue)) { - continue; - } - String key = mapping().mappingField(fieldName); - if (!handledPk && primaryKeys.contains(key)) { - // Don't put priamry key/values into general properties - List rawPkValues = splitField(fieldName, - fieldValue); - this.pkValues = 
rawPkValues.stream().map(rawPkValue -> { - return mappingValue(fieldName, rawPkValue); - }).collect(Collectors.toList()); - handledPk = true; - } else { - Object value = mappingValue(fieldName, fieldValue); - this.properties.put(key, value); - } - } - } - - @Override - public void extractFromEdge(String[] names, Object[] values, - int[] fieldIndexes) { - List primaryKeys = vertexLabel.primaryKeys(); - E.checkArgument(fieldIndexes.length == 1 && primaryKeys.size() == 1, - "In case unfold is true, just supported " + - "a single primary key"); - String fieldName = names[fieldIndexes[0]]; - this.pkName = mapping().mappingField(fieldName); - String primaryKey = primaryKeys.get(0); - E.checkArgument(this.pkName.equals(primaryKey), - "Make sure the the primary key field '%s' is " + - "not empty, or check whether the headers or " + - "field_mapping are configured correctly", - primaryKey); - Object fieldValue = values[fieldIndexes[0]]; - List rawPkValues = splitField(fieldName, fieldValue); - this.pkValues = rawPkValues.stream().map(rawPkValue -> { - return mappingValue(fieldName, rawPkValue); - }).collect(Collectors.toList()); - } - - @Override - public List buildVertices(boolean withProperty) { - E.checkArgument(this.pkValues != null, - "The primary values shouldn't be null"); - List vertices = new ArrayList<>(this.pkValues.size()); - for (Object pkValue : this.pkValues) { - if (isEmptyPkValue(pkValue)) { - continue; - } - pkValue = convertPropertyValue(this.pkName, pkValue); - String id = spliceVertexId(vertexLabel, pkValue); - checkVertexIdLength(id); - - Vertex vertex = new Vertex(vertexLabel.name()); - // NOTE: withProperty is true means that parsing vertex - if (withProperty) { - addProperty(vertex, this.pkName, pkValue, false); - addProperties(vertex, this.properties); - checkNonNullableKeys(vertex); - } - vertex.id(id); - vertices.add(vertex); - } - return vertices; - } - } -} diff --git 
a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java index 75884aae5..81320e865 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/BytesBuffer.java @@ -19,30 +19,16 @@ package com.baidu.hugegraph.serializer.direct.util; -import static com.baidu.hugegraph.serializer.direct.BinaryEntry.BinaryId; -import static com.baidu.hugegraph.serializer.direct.util.Id.UUID_LENGTH; - +import com.baidu.hugegraph.serializer.direct.struct.HugeType; +import com.baidu.hugegraph.util.*; import java.io.OutputStream; -import java.nio.Buffer; import java.nio.ByteBuffer; import java.util.Arrays; -import java.util.Collection; import java.util.Date; import java.util.UUID; -import com.baidu.hugegraph.backend.id.EdgeId; -import com.baidu.hugegraph.backend.id.Id.IdType; -import com.baidu.hugegraph.backend.id.IdGenerator; -import com.baidu.hugegraph.serializer.direct.struct.DataType; -import com.baidu.hugegraph.serializer.direct.struct.HugeType; -import com.baidu.hugegraph.serializer.direct.util.Id.IdType; -import com.baidu.hugegraph.structure.constant.Cardinality; -import com.baidu.hugegraph.structure.schema.PropertyKey; -import com.baidu.hugegraph.util.Bytes; -import com.baidu.hugegraph.util.E; - /** - * class BytesBuffer is an util for read/write binary + * class BytesBuffer is a util for read/write binary */ public final class BytesBuffer extends OutputStream { @@ -78,8 +64,6 @@ public final class BytesBuffer extends OutputStream { public static final int BUF_EDGE_ID = 128; public static final int BUF_PROPERTY = 64; - public static final byte[] BYTES_EMPTY = new byte[0]; - private ByteBuffer buffer; private final boolean resize; @@ -89,8 +73,8 @@ public BytesBuffer() { public BytesBuffer(int capacity) { E.checkArgument(capacity <= 
MAX_BUFFER_CAPACITY, - "Capacity exceeds max buffer capacity: %s", - MAX_BUFFER_CAPACITY); + "Capacity exceeds max buffer capacity: %s", + MAX_BUFFER_CAPACITY); this.buffer = ByteBuffer.allocate(capacity); this.resize = true; } @@ -122,7 +106,7 @@ public ByteBuffer asByteBuffer() { } public BytesBuffer forReadWritten() { - ((Buffer) this.buffer).flip(); + this.buffer.flip(); return this; } @@ -169,8 +153,8 @@ private void require(int size) { // Extra capacity as buffer int newcapacity = size + this.buffer.limit() + DEFAULT_CAPACITY; E.checkArgument(newcapacity <= MAX_BUFFER_CAPACITY, - "Capacity exceeds max buffer capacity: %s", - MAX_BUFFER_CAPACITY); + "Capacity exceeds max buffer capacity: %s", + MAX_BUFFER_CAPACITY); ByteBuffer newBuffer = ByteBuffer.allocate(newcapacity); this.buffer.flip(); newBuffer.put(this.buffer); @@ -262,7 +246,7 @@ public byte[] read(int length) { } public boolean readBoolean() { - return this.buffer.get() == 0 ? false : true; + return this.buffer.get() != 0; } public char readChar() { @@ -291,8 +275,8 @@ public double readDouble() { public BytesBuffer writeBytes(byte[] bytes) { E.checkArgument(bytes.length <= UINT16_MAX, - "The max length of bytes is %s, but got %s", - UINT16_MAX, bytes.length); + "The max length of bytes is %s, but got %s", + UINT16_MAX, bytes.length); require(SHORT_LEN + bytes.length); this.writeVInt(bytes.length); this.write(bytes); @@ -302,13 +286,14 @@ public BytesBuffer writeBytes(byte[] bytes) { public byte[] readBytes() { int length = this.readVInt(); assert length >= 0; - return this.read(length); + byte[] bytes = this.read(length); + return bytes; } public BytesBuffer writeBigBytes(byte[] bytes) { E.checkArgument(bytes.length <= BLOB_LEN_MAX, - "The max length of bytes is %s, but got %s", - BLOB_LEN_MAX, bytes.length); + "The max length of bytes is %s, but got %s", + BLOB_LEN_MAX, bytes.length); require(BLOB_LEN + bytes.length); this.writeVInt(bytes.length); this.write(bytes); @@ -318,7 +303,8 @@ public 
BytesBuffer writeBigBytes(byte[] bytes) { public byte[] readBigBytes() { int length = this.readVInt(); assert length >= 0; - return this.read(length); + byte[] bytes = this.read(length); + return bytes; } public BytesBuffer writeStringRaw(String val) { @@ -346,11 +332,11 @@ public BytesBuffer writeStringWithEnding(String value) { * 0xFF is not a valid byte in UTF8 bytes */ assert !Bytes.contains(bytes, STRING_ENDING_BYTE_FF) : - "Invalid UTF8 bytes: " + value; + "Invalid UTF8 bytes: " + value; if (Bytes.contains(bytes, STRING_ENDING_BYTE)) { E.checkArgument(false, - "Can't contains byte '0x00' in string: '%s'", - value); + "Can't contains byte '0x00' in string: '%s'", + value); } this.write(bytes); } @@ -433,10 +419,11 @@ public BytesBuffer writeVInt(int value) { public int readVInt() { byte leading = this.read(); E.checkArgument(leading != 0x80, - "Unexpected varint with leading byte '0x%s'", - Bytes.toHex(leading)); + "Unexpected varint with leading byte '0x%s'", + Bytes.toHex(leading)); int value = leading & 0x7f; if (leading >= 0) { + assert (leading & 0x80) == 0; return value; } @@ -452,11 +439,11 @@ public int readVInt() { } E.checkArgument(i < 5, - "Unexpected varint %s with too many bytes(%s)", - value, i + 1); + "Unexpected varint %s with too many bytes(%s)", + value, i + 1); E.checkArgument(i < 4 || (leading & 0x70) == 0, - "Unexpected varint %s with leading byte '0x%s'", - value, Bytes.toHex(leading)); + "Unexpected varint %s with leading byte '0x%s'", + value, Bytes.toHex(leading)); return value; } @@ -496,8 +483,8 @@ public BytesBuffer writeVLong(long value) { public long readVLong() { byte leading = this.read(); E.checkArgument(leading != 0x80, - "Unexpected varlong with leading byte '0x%s'", - Bytes.toHex(leading)); + "Unexpected varlong with leading byte '0x%s'", + Bytes.toHex(leading)); long value = leading & 0x7fL; if (leading >= 0) { assert (leading & 0x80) == 0; @@ -516,115 +503,31 @@ public long readVLong() { } E.checkArgument(i < 10, - 
"Unexpected varlong %s with too many bytes(%s)", - value, i + 1); + "Unexpected varlong %s with too many bytes(%s)", + value, i + 1); E.checkArgument(i < 9 || (leading & 0x7e) == 0, - "Unexpected varlong %s with leading byte '0x%s'", - value, Bytes.toHex(leading)); + "Unexpected varlong %s with leading byte '0x%s'", + value, Bytes.toHex(leading)); return value; } - public BytesBuffer writeProperty(PropertyKey pkey, Object value) { - if (pkey.cardinality() == Cardinality.SINGLE) { - this.writeProperty(pkey.dataType(), value); - return this; - } - - assert pkey.cardinality() == Cardinality.LIST || - pkey.cardinality() == Cardinality.SET; - Collection values = (Collection) value; - this.writeVInt(values.size()); - for (Object o : values) { - this.writeProperty(pkey.dataType(), o); - } - return this; - } - - public Object readProperty(PropertyKey pkey) { - if (pkey.cardinality() == Cardinality.SINGLE) { - return this.readProperty(pkey.dataType()); - } - - assert pkey.cardinality() == Cardinality.LIST || - pkey.cardinality() == Cardinality.SET; - int size = this.readVInt(); - Collection values = pkey.newValue(); - for (int i = 0; i < size; i++) { - values.add(this.readProperty(pkey.dataType())); - } - return values; - } - - public void writeProperty(DataType dataType, Object value) { - switch (dataType) { - case BOOLEAN: - this.writeVInt(((Boolean) value) ? 
1 : 0); - break; - case BYTE: - this.writeVInt((Byte) value); - break; - case INT: - this.writeVInt((Integer) value); - break; - case FLOAT: - this.writeFloat((Float) value); - break; - case LONG: - this.writeVLong((Long) value); - break; - case DATE: - this.writeVLong(((Date) value).getTime()); - break; - case DOUBLE: - this.writeDouble((Double) value); - break; - case TEXT: - this.writeString((String) value); - break; - case UUID: - UUID uuid = (UUID) value; - // Generally writeVLong(uuid) can't save space - this.writeLong(uuid.getMostSignificantBits()); - this.writeLong(uuid.getLeastSignificantBits()); - break; - default: // ignore - } - } - - public Object readProperty(DataType dataType) { - switch (dataType) { - case BOOLEAN: - return this.readVInt() == 1; - case BYTE: - return (byte) this.readVInt(); - case INT: - return this.readVInt(); - case FLOAT: - return this.readFloat(); - case LONG: - return this.readVLong(); - case DATE: - return new Date(this.readVLong()); - case DOUBLE: - return this.readDouble(); - case TEXT: - return this.readString(); - case UUID: - return new UUID(this.readLong(), this.readLong()); - default: // ignore - } - } - public BytesBuffer writeId(Id id) { return this.writeId(id, false); } public BytesBuffer writeId(Id id, boolean big) { switch (id.type()) { - case EDGE: - // Edge Id - this.writeUInt8(0x7e); // 0b01111110 means EdgeId - this.writeEdgeId(id); + case LONG: + // Number Id + long value = id.asLong(); + this.writeNumber(value); + break; + case UUID: + // UUID Id + byte[] bytes = id.asBytes(); + assert bytes.length == Id.UUID_LENGTH; + this.writeUInt8(0x7f); // 0b01111111 means UUID + this.write(bytes); break; default: // String Id @@ -633,14 +536,14 @@ public BytesBuffer writeId(Id id, boolean big) { E.checkArgument(len > 0, "Can't write empty id"); if (!big) { E.checkArgument(len <= ID_LEN_MAX, - "Id max length is %s, but got %s {%s}", - ID_LEN_MAX, len, id); + "Id max length is %s, but got %s {%s}", + ID_LEN_MAX, len, id); 
len -= 1; // mapping [1, 128] to [0, 127] this.writeUInt8(len | 0x80); } else { E.checkArgument(len <= BIG_ID_LEN_MAX, - "Big id max length is %s, but got %s {%s}", - BIG_ID_LEN_MAX, len, id); + "Big id max length is %s, but got %s {%s}", + BIG_ID_LEN_MAX, len, id); len -= 1; int high = len >> 8; int low = len & 0xff; @@ -653,55 +556,6 @@ public BytesBuffer writeId(Id id, boolean big) { return this; } - public Id readId() { - return this.readId(false); - } - - public Id readId(boolean big) { - byte b = this.read(); - boolean number = (b & 0x80) == 0; - if (number) { - if (b == 0x7f) { - // UUID - return IdGenerator.of(this.read(UUID_LENGTH), IdType.UUID); - } else if (b == 0x7e) { - // Edge Id - return this.readEdgeId(); - } else { - // Number Id - return IdGenerator.of(this.readNumber(b)); - } - } else { - // String Id - int len = b & ID_LEN_MASK; - if (big) { - int high = len << 8; - int low = this.readUInt8(); - len = high + low; - } - len += 1; // restore [0, 127] to [1, 128] - byte[] id = this.read(len); - return IdGenerator.of(id, IdType.STRING); - } - } - - public BytesBuffer writeEdgeId(Id id) { - // owner-vertex + dir + edge-label + sort-values + other-vertex - EdgeId edge = (EdgeId) id; - this.writeId(edge.ownerVertexId()); - this.write(edge.directionCode()); - this.writeId(edge.edgeLabelId()); - this.writeStringWithEnding(edge.sortValues()); - this.writeId(edge.otherVertexId()); - return this; - } - - public Id readEdgeId() { - return new EdgeId(this.readId(), EdgeId.directionFromCode(this.read()), - this.readId(), this.readStringWithEnding(), - this.readId()); - } - public BytesBuffer writeIndexId(Id id, HugeType type) { return this.writeIndexId(id, type, true); } @@ -716,10 +570,10 @@ public BytesBuffer writeIndexId(Id id, HugeType type, boolean withEnding) { if (Bytes.contains(bytes, STRING_ENDING_BYTE)) { // Not allow STRING_ENDING_BYTE exist in string index id E.checkArgument(false, - "The %s type index id can't contains " + + "The %s type index id 
can't contains " + "byte '0x%s', but got: 0x%s", type, - Bytes.toHex(STRING_ENDING_BYTE), - Bytes.toHex(bytes)); + Bytes.toHex(STRING_ENDING_BYTE), + Bytes.toHex(bytes)); } if (withEnding) { this.writeStringWithEnding(""); @@ -728,47 +582,6 @@ public BytesBuffer writeIndexId(Id id, HugeType type, boolean withEnding) { return this; } - public BinaryId readIndexId(HugeType type) { - byte[] id; - if (type.isRange4Index()) { - // IndexLabel 4 bytes + fieldValue 4 bytes - id = this.read(8); - } else if (type.isRange8Index()) { - // IndexLabel 4 bytes + fieldValue 8 bytes - id = this.read(12); - } else { - assert type.isStringIndex(); - id = this.readBytesWithEnding(); - } - return new BinaryId(id, IdGenerator.of(id, IdType.STRING)); - } - - public BinaryId asId() { - return new BinaryId(this.bytes(), null); - } - - public BinaryId parseId(HugeType type, boolean enablePartition) { - if (type.isIndex()) { - return this.readIndexId(type); - } - // Parse id from bytes - if ((type.isVertex() || type.isEdge()) && enablePartition) { - this.readShort(); - } - int start = this.buffer.position(); - /* - * Since edge id in edges table doesn't prefix with leading 0x7e, - * so readId() will return the source vertex id instead of edge id, - * can't call: type.isEdge() ? 
this.readEdgeId() : this.readId(); - */ - Id id = this.readId(); - int end = this.buffer.position(); - int len = end - start; - byte[] bytes = new byte[len]; - System.arraycopy(this.array(), start, bytes, 0, len); - return new BinaryId(bytes, id); - } - private void writeNumber(long val) { /* * 8 kinds of number, 2 ~ 9 bytes number: @@ -838,8 +651,8 @@ private void writeNumber(long val) { private long readNumber(byte b) { E.checkArgument((b & 0x80) == 0, - "Not a number type with prefix byte '0x%s'", - Bytes.toHex(b)); + "Not a number type with prefix byte '0x%s'", + Bytes.toHex(b)); // Parse the kind from byte 0kkksxxx int kind = b >>> 4; boolean positive = (b & 0x08) > 0; @@ -866,8 +679,8 @@ private long readNumber(byte b) { break; case 6: value |= (long) this.readUInt8() << 48 | - (long) this.readUInt16() << 32 | - this.readUInt32(); + (long) this.readUInt16() << 32 | + this.readUInt32(); break; case 7: assert high3bits == 0L; @@ -887,8 +700,7 @@ private long readNumber(byte b) { private byte[] readBytesWithEnding() { int start = this.buffer.position(); boolean foundEnding = false; - int remaining = this.remaining(); - for (int i = 0; i < remaining; i++) { + while (this.remaining() > 0) { byte current = this.read(); if (current == STRING_ENDING_BYTE) { foundEnding = true; @@ -896,14 +708,52 @@ private byte[] readBytesWithEnding() { } } E.checkArgument(foundEnding, "Not found ending '0x%s'", - Bytes.toHex(STRING_ENDING_BYTE)); + Bytes.toHex(STRING_ENDING_BYTE)); int end = this.buffer.position() - 1; int len = end - start; - if (len <= 0) { - return BYTES_EMPTY; - } byte[] bytes = new byte[len]; System.arraycopy(this.array(), start, bytes, 0, len); return bytes; } + + public void writeProperty(com.baidu.hugegraph.structure.constant.DataType dataType, Object value) { + switch (dataType) { + case BOOLEAN: + this.writeVInt(((Boolean) value) ? 
1 : 0); + break; + case BYTE: + this.writeVInt((Byte) value); + break; + case INT: + this.writeVInt((Integer) value); + break; + case FLOAT: + this.writeFloat((Float) value); + break; + case LONG: + this.writeVLong((Long) value); + break; + case DATE: + this.writeVLong(((Date) value).getTime()); + break; + case DOUBLE: + this.writeDouble((Double) value); + break; + case TEXT: + this.writeString((String) value); + break; + + case UUID: + UUID uuid = (UUID) value; + // Generally writeVLong(uuid) can't save space + this.writeLong(uuid.getMostSignificantBits()); + this.writeLong(uuid.getLeastSignificantBits()); + break; + default: + //this.writeBytes(KryoUtil.toKryoWithType(value)); + break; + } + + } + } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java index 7e3d87691..a597b09a9 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java @@ -25,308 +25,308 @@ import java.util.List; import java.util.Set; import java.util.UUID; - -import com.baidu.hugegraph.loader.constant.Constants; -import com.baidu.hugegraph.loader.source.AbstractSource; -import com.baidu.hugegraph.loader.source.InputSource; -import com.baidu.hugegraph.loader.source.file.FileSource; -import com.baidu.hugegraph.loader.source.file.ListFormat; -import com.baidu.hugegraph.structure.constant.Cardinality; -import com.baidu.hugegraph.structure.constant.DataType; -import com.baidu.hugegraph.structure.schema.PropertyKey; -import com.baidu.hugegraph.util.DateUtil; -import com.baidu.hugegraph.util.E; -import com.baidu.hugegraph.util.InsertionOrderUtil; -import com.baidu.hugegraph.util.ReflectionUtil; -import com.google.common.base.Splitter; -import com.google.common.collect.ImmutableSet; +// +//import 
com.baidu.hugegraph.loader.constant.Constants; +//import com.baidu.hugegraph.loader.source.AbstractSource; +//import com.baidu.hugegraph.loader.source.InputSource; +//import com.baidu.hugegraph.loader.source.file.FileSource; +//import com.baidu.hugegraph.loader.source.file.ListFormat; +//import com.baidu.hugegraph.structure.constant.Cardinality; +//import com.baidu.hugegraph.structure.constant.DataType; +//import com.baidu.hugegraph.structure.schema.PropertyKey; +//import com.baidu.hugegraph.util.DateUtil; +//import com.baidu.hugegraph.util.E; +//import com.baidu.hugegraph.util.InsertionOrderUtil; +//import com.baidu.hugegraph.util.ReflectionUtil; +//import com.google.common.base.Splitter; +//import com.google.common.collect.ImmutableSet; public final class DataTypeUtil { - - private static final Set ACCEPTABLE_TRUE = ImmutableSet.of( - "true", "1", "yes", "y" - ); - private static final Set ACCEPTABLE_FALSE = ImmutableSet.of( - "false", "0", "no", "n" - ); - - public static boolean isSimpleValue(Object value) { - if (value == null) { - return false; - } - return ReflectionUtil.isSimpleType(value.getClass()); - } - - public static Object convert(Object value, PropertyKey propertyKey, InputSource source) { - E.checkArgumentNotNull(value, - "The value to be converted can't be null"); - - String key = propertyKey.name(); - DataType dataType = propertyKey.dataType(); - Cardinality cardinality = propertyKey.cardinality(); - switch (cardinality) { - case SINGLE: - return parseSingleValue(key, value, dataType, source); - case SET: - case LIST: - return parseMultiValues(key, value, dataType, cardinality, source); - default: - throw new AssertionError(String.format( - "Unsupported cardinality: '%s'", cardinality)); - } - } - - @SuppressWarnings("unchecked") - public static List splitField(String key, Object rawColumnValue, InputSource source) { - E.checkArgument(rawColumnValue != null, - "The value to be split can't be null"); - if (rawColumnValue instanceof Collection) { - 
return (List) rawColumnValue; - } - // TODO: Seems a bit violent - String rawValue = rawColumnValue.toString(); - return split(key, rawValue, source); - } - - public static long parseNumber(String key, Object rawValue) { - if (rawValue instanceof Number) { - return ((Number) rawValue).longValue(); - } else if (rawValue instanceof String) { - // trim() is a little time-consuming - return parseLong(((String) rawValue).trim()); - } - throw new IllegalArgumentException(String.format( - "The value(key='%s') must can be casted to Long, " + - "but got '%s'(%s)", key, rawValue, rawValue.getClass().getName())); - } - - public static UUID parseUUID(String key, Object rawValue) { - if (rawValue instanceof UUID) { - return (UUID) rawValue; - } else if (rawValue instanceof String) { - String value = ((String) rawValue).trim(); - if (value.contains("-")) { - return UUID.fromString(value); - } - // UUID represented by hex string - E.checkArgument(value.length() == 32, "Invalid UUID value(key='%s') '%s'", key, value); - String high = value.substring(0, 16); - String low = value.substring(16); - return new UUID(Long.parseUnsignedLong(high, 16), Long.parseUnsignedLong(low, 16)); - } - throw new IllegalArgumentException(String.format( - "Failed to convert value(key='%s') '%s'(%s) to UUID", - key, rawValue, rawValue.getClass())); - } - - private static Object parseSingleValue(String key, Object rawValue, - DataType dataType, - InputSource source) { - // Trim space if raw value is string - Object value = rawValue; - if (rawValue instanceof String) { - value = ((String) rawValue).trim(); - } - if (dataType.isNumber()) { - return parseNumber(key, value, dataType); - } else if (dataType.isBoolean()) { - return parseBoolean(key, value); - } else if (dataType.isDate()) { - E.checkState(source instanceof FileSource, - "Only accept FileSource when convert String value " + - "to Date, but got '%s'", source.getClass().getName()); - String dateFormat = ((FileSource) source).dateFormat(); - 
String timeZone = ((FileSource) source).timeZone(); - return parseDate(key, value, dateFormat, timeZone); - } else if (dataType.isUUID()) { - return parseUUID(key, value); - } - E.checkArgument(checkDataType(key, value, dataType), - "The value(key='%s') '%s'(%s) is not match with " + - "data type %s and can't convert to it", - key, value, value.getClass(), dataType); - return value; - } - - /** - * collection format: "obj1,obj2,...,objn" or "[obj1,obj2,...,objn]" ..etc - * TODO: After parsing to json, the order of the collection changed - * in some cases (such as list) - **/ - private static Object parseMultiValues(String key, Object values, - DataType dataType, - Cardinality cardinality, - InputSource source) { - // JSON file should not parse again - if (values instanceof Collection && - checkCollectionDataType(key, (Collection) values, dataType)) { - return values; - } - - E.checkState(values instanceof String, - "The value(key='%s') must be String type, " + - "but got '%s'(%s)", key, values); - String rawValue = (String) values; - List valueColl = split(key, rawValue, source); - Collection results = cardinality == Cardinality.LIST ? 
- InsertionOrderUtil.newList() : - InsertionOrderUtil.newSet(); - valueColl.forEach(value -> { - results.add(parseSingleValue(key, value, dataType, source)); - }); - E.checkArgument(checkCollectionDataType(key, results, dataType), - "Not all collection elems %s match with data type %s", - results, dataType); - return results; - } - - private static Boolean parseBoolean(String key, Object rawValue) { - if (rawValue instanceof Boolean) { - return (Boolean) rawValue; - } - if (rawValue instanceof String) { - String value = ((String) rawValue).toLowerCase(); - if (ACCEPTABLE_TRUE.contains(value)) { - return true; - } else if (ACCEPTABLE_FALSE.contains(value)) { - return false; - } else { - throw new IllegalArgumentException(String.format( - "Failed to convert '%s'(key='%s') to Boolean, " + - "the acceptable boolean strings are %s or %s", - key, rawValue, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE)); - } - } - throw new IllegalArgumentException(String.format( - "Failed to convert value(key='%s') '%s'(%s) to Boolean", - key, rawValue, rawValue.getClass())); - } - - private static Number parseNumber(String key, Object value, - DataType dataType) { - E.checkState(dataType.isNumber(), - "The target data type must be number"); - - if (dataType.clazz().isInstance(value)) { - return (Number) value; - } - try { - switch (dataType) { - case BYTE: - return Byte.valueOf(value.toString()); - case INT: - return Integer.valueOf(value.toString()); - case LONG: - return parseLong(value.toString()); - case FLOAT: - return Float.valueOf(value.toString()); - case DOUBLE: - return Double.valueOf(value.toString()); - default: - throw new AssertionError(String.format( - "Number type only contains Byte, Integer, " + - "Long, Float, Double, but got %s", - dataType.clazz())); - } - } catch (NumberFormatException e) { - throw new IllegalArgumentException(String.format( - "Failed to convert value(key=%s) '%s'(%s) to Number", - key, value, value.getClass()), e); - } - } - - private static long 
parseLong(String rawValue) { - if (rawValue.startsWith("-")) { - return Long.parseLong(rawValue); - } else { - return Long.parseUnsignedLong(rawValue); - } - } - - private static Date parseDate(String key, Object value, - String dateFormat, String timeZone) { - if (value instanceof Date) { - return (Date) value; - } - if (value instanceof Number) { - return new Date(((Number) value).longValue()); - } else if (value instanceof String) { - if (Constants.TIMESTAMP.equals(dateFormat)) { - try { - long timestamp = Long.parseLong((String) value); - return new Date(timestamp); - } catch (NumberFormatException e) { - throw new IllegalArgumentException(String.format( - "Invalid timestamp value '%s'", value)); - } - } else { - return DateUtil.parse((String) value, dateFormat, timeZone); - } - } - throw new IllegalArgumentException(String.format( - "Failed to convert value(key='%s') '%s'(%s) to Date", - key, value, value.getClass())); - } - - private static List split(String key, String rawValue, - InputSource source) { - List valueColl = new ArrayList<>(); - if (rawValue.isEmpty()) { - return valueColl; - } - E.checkState(AbstractSource.class.isAssignableFrom(source.getClass()), - "Only accept AbstractSource when parse multi values, " + - "but got '%s'", source.getClass().getName()); - ListFormat listFormat = ((AbstractSource) source).listFormat(); - E.checkArgumentNotNull(listFormat, "The list_format must be set when " + - "parse list or set values"); - - String startSymbol = listFormat.startSymbol(); - String endSymbol = listFormat.endSymbol(); - E.checkArgument(rawValue.length() >= - startSymbol.length() + endSymbol.length(), - "The value(key='%s') '%s' length(%s) must be >= " + - "start symbol '%s' + end symbol '%s' length", - key, rawValue, rawValue.length(), - startSymbol, endSymbol); - E.checkArgument(rawValue.startsWith(startSymbol) && - rawValue.endsWith(endSymbol), - "The value(key='%s') must start with '%s' and " + - "end with '%s', but got '%s'", - key, 
startSymbol, endSymbol, rawValue); - rawValue = rawValue.substring(startSymbol.length(), - rawValue.length() - endSymbol.length()); - String elemDelimiter = listFormat.elemDelimiter(); - Splitter.on(elemDelimiter).split(rawValue).forEach(value -> { - if (!listFormat.ignoredElems().contains(value)) { - valueColl.add(value); - } - }); - return valueColl; - } - - /** - * Check type of the value valid - */ - private static boolean checkDataType(String key, Object value, - DataType dataType) { - if (value instanceof Number) { - return parseNumber(key, value, dataType) != null; - } - return dataType.clazz().isInstance(value); - } - - /** - * Check type of all the values(may be some of list properties) valid - */ - private static boolean checkCollectionDataType(String key, - Collection values, - DataType dataType) { - for (Object value : values) { - if (!checkDataType(key, value, dataType)) { - return false; - } - } - return true; - } +// +// private static final Set ACCEPTABLE_TRUE = ImmutableSet.of( +// "true", "1", "yes", "y" +// ); +// private static final Set ACCEPTABLE_FALSE = ImmutableSet.of( +// "false", "0", "no", "n" +// ); +// +// public static boolean isSimpleValue(Object value) { +// if (value == null) { +// return false; +// } +// return ReflectionUtil.isSimpleType(value.getClass()); +// } +// +// public static Object convert(Object value, PropertyKey propertyKey, InputSource source) { +// E.checkArgumentNotNull(value, +// "The value to be converted can't be null"); +// +// String key = propertyKey.name(); +// DataType dataType = propertyKey.dataType(); +// Cardinality cardinality = propertyKey.cardinality(); +// switch (cardinality) { +// case SINGLE: +// return parseSingleValue(key, value, dataType, source); +// case SET: +// case LIST: +// return parseMultiValues(key, value, dataType, cardinality, source); +// default: +// throw new AssertionError(String.format( +// "Unsupported cardinality: '%s'", cardinality)); +// } +// } +// +// 
@SuppressWarnings("unchecked") +// public static List splitField(String key, Object rawColumnValue, InputSource source) { +// E.checkArgument(rawColumnValue != null, +// "The value to be split can't be null"); +// if (rawColumnValue instanceof Collection) { +// return (List) rawColumnValue; +// } +// // TODO: Seems a bit violent +// String rawValue = rawColumnValue.toString(); +// return split(key, rawValue, source); +// } +// +// public static long parseNumber(String key, Object rawValue) { +// if (rawValue instanceof Number) { +// return ((Number) rawValue).longValue(); +// } else if (rawValue instanceof String) { +// // trim() is a little time-consuming +// return parseLong(((String) rawValue).trim()); +// } +// throw new IllegalArgumentException(String.format( +// "The value(key='%s') must can be casted to Long, " + +// "but got '%s'(%s)", key, rawValue, rawValue.getClass().getName())); +// } +// +// public static UUID parseUUID(String key, Object rawValue) { +// if (rawValue instanceof UUID) { +// return (UUID) rawValue; +// } else if (rawValue instanceof String) { +// String value = ((String) rawValue).trim(); +// if (value.contains("-")) { +// return UUID.fromString(value); +// } +// // UUID represented by hex string +// E.checkArgument(value.length() == 32, "Invalid UUID value(key='%s') '%s'", key, value); +// String high = value.substring(0, 16); +// String low = value.substring(16); +// return new UUID(Long.parseUnsignedLong(high, 16), Long.parseUnsignedLong(low, 16)); +// } +// throw new IllegalArgumentException(String.format( +// "Failed to convert value(key='%s') '%s'(%s) to UUID", +// key, rawValue, rawValue.getClass())); +// } +// +// private static Object parseSingleValue(String key, Object rawValue, +// DataType dataType, +// InputSource source) { +// // Trim space if raw value is string +// Object value = rawValue; +// if (rawValue instanceof String) { +// value = ((String) rawValue).trim(); +// } +// if (dataType.isNumber()) { +// return 
parseNumber(key, value, dataType); +// } else if (dataType.isBoolean()) { +// return parseBoolean(key, value); +// } else if (dataType.isDate()) { +// E.checkState(source instanceof FileSource, +// "Only accept FileSource when convert String value " + +// "to Date, but got '%s'", source.getClass().getName()); +// String dateFormat = ((FileSource) source).dateFormat(); +// String timeZone = ((FileSource) source).timeZone(); +// return parseDate(key, value, dateFormat, timeZone); +// } else if (dataType.isUUID()) { +// return parseUUID(key, value); +// } +// E.checkArgument(checkDataType(key, value, dataType), +// "The value(key='%s') '%s'(%s) is not match with " + +// "data type %s and can't convert to it", +// key, value, value.getClass(), dataType); +// return value; +// } +// +// /** +// * collection format: "obj1,obj2,...,objn" or "[obj1,obj2,...,objn]" ..etc +// * TODO: After parsing to json, the order of the collection changed +// * in some cases (such as list) +// **/ +// private static Object parseMultiValues(String key, Object values, +// DataType dataType, +// Cardinality cardinality, +// InputSource source) { +// // JSON file should not parse again +// if (values instanceof Collection && +// checkCollectionDataType(key, (Collection) values, dataType)) { +// return values; +// } +// +// E.checkState(values instanceof String, +// "The value(key='%s') must be String type, " + +// "but got '%s'(%s)", key, values); +// String rawValue = (String) values; +// List valueColl = split(key, rawValue, source); +// Collection results = cardinality == Cardinality.LIST ? 
+// InsertionOrderUtil.newList() : +// InsertionOrderUtil.newSet(); +// valueColl.forEach(value -> { +// results.add(parseSingleValue(key, value, dataType, source)); +// }); +// E.checkArgument(checkCollectionDataType(key, results, dataType), +// "Not all collection elems %s match with data type %s", +// results, dataType); +// return results; +// } +// +// private static Boolean parseBoolean(String key, Object rawValue) { +// if (rawValue instanceof Boolean) { +// return (Boolean) rawValue; +// } +// if (rawValue instanceof String) { +// String value = ((String) rawValue).toLowerCase(); +// if (ACCEPTABLE_TRUE.contains(value)) { +// return true; +// } else if (ACCEPTABLE_FALSE.contains(value)) { +// return false; +// } else { +// throw new IllegalArgumentException(String.format( +// "Failed to convert '%s'(key='%s') to Boolean, " + +// "the acceptable boolean strings are %s or %s", +// key, rawValue, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE)); +// } +// } +// throw new IllegalArgumentException(String.format( +// "Failed to convert value(key='%s') '%s'(%s) to Boolean", +// key, rawValue, rawValue.getClass())); +// } +// +// private static Number parseNumber(String key, Object value, +// DataType dataType) { +// E.checkState(dataType.isNumber(), +// "The target data type must be number"); +// +// if (dataType.clazz().isInstance(value)) { +// return (Number) value; +// } +// try { +// switch (dataType) { +// case BYTE: +// return Byte.valueOf(value.toString()); +// case INT: +// return Integer.valueOf(value.toString()); +// case LONG: +// return parseLong(value.toString()); +// case FLOAT: +// return Float.valueOf(value.toString()); +// case DOUBLE: +// return Double.valueOf(value.toString()); +// default: +// throw new AssertionError(String.format( +// "Number type only contains Byte, Integer, " + +// "Long, Float, Double, but got %s", +// dataType.clazz())); +// } +// } catch (NumberFormatException e) { +// throw new IllegalArgumentException(String.format( +// "Failed to 
convert value(key=%s) '%s'(%s) to Number", +// key, value, value.getClass()), e); +// } +// } +// +// private static long parseLong(String rawValue) { +// if (rawValue.startsWith("-")) { +// return Long.parseLong(rawValue); +// } else { +// return Long.parseUnsignedLong(rawValue); +// } +// } +// +// private static Date parseDate(String key, Object value, +// String dateFormat, String timeZone) { +// if (value instanceof Date) { +// return (Date) value; +// } +// if (value instanceof Number) { +// return new Date(((Number) value).longValue()); +// } else if (value instanceof String) { +// if (Constants.TIMESTAMP.equals(dateFormat)) { +// try { +// long timestamp = Long.parseLong((String) value); +// return new Date(timestamp); +// } catch (NumberFormatException e) { +// throw new IllegalArgumentException(String.format( +// "Invalid timestamp value '%s'", value)); +// } +// } else { +// return DateUtil.parse((String) value, dateFormat, timeZone); +// } +// } +// throw new IllegalArgumentException(String.format( +// "Failed to convert value(key='%s') '%s'(%s) to Date", +// key, value, value.getClass())); +// } +// +// private static List split(String key, String rawValue, +// InputSource source) { +// List valueColl = new ArrayList<>(); +// if (rawValue.isEmpty()) { +// return valueColl; +// } +// E.checkState(AbstractSource.class.isAssignableFrom(source.getClass()), +// "Only accept AbstractSource when parse multi values, " + +// "but got '%s'", source.getClass().getName()); +// ListFormat listFormat = ((AbstractSource) source).listFormat(); +// E.checkArgumentNotNull(listFormat, "The list_format must be set when " + +// "parse list or set values"); +// +// String startSymbol = listFormat.startSymbol(); +// String endSymbol = listFormat.endSymbol(); +// E.checkArgument(rawValue.length() >= +// startSymbol.length() + endSymbol.length(), +// "The value(key='%s') '%s' length(%s) must be >= " + +// "start symbol '%s' + end symbol '%s' length", +// key, rawValue, 
rawValue.length(), +// startSymbol, endSymbol); +// E.checkArgument(rawValue.startsWith(startSymbol) && +// rawValue.endsWith(endSymbol), +// "The value(key='%s') must start with '%s' and " + +// "end with '%s', but got '%s'", +// key, startSymbol, endSymbol, rawValue); +// rawValue = rawValue.substring(startSymbol.length(), +// rawValue.length() - endSymbol.length()); +// String elemDelimiter = listFormat.elemDelimiter(); +// Splitter.on(elemDelimiter).split(rawValue).forEach(value -> { +// if (!listFormat.ignoredElems().contains(value)) { +// valueColl.add(value); +// } +// }); +// return valueColl; +// } +// +// /** +// * Check type of the value valid +// */ +// private static boolean checkDataType(String key, Object value, +// DataType dataType) { +// if (value instanceof Number) { +// return parseNumber(key, value, dataType) != null; +// } +// return dataType.clazz().isInstance(value); +// } +// +// /** +// * Check type of all the values(may be some of list properties) valid +// */ +// private static boolean checkCollectionDataType(String key, +// Collection values, +// DataType dataType) { +// for (Object value : values) { +// if (!checkDataType(key, value, dataType)) { +// return false; +// } +// } +// return true; +// } } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java deleted file mode 100644 index ff8e84d2f..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/EdgeId.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. 
The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. - */ - -package com.baidu.hugegraph.serializer.direct.util; - -import com.baidu.hugegraph.exception.NotFoundException; -import com.baidu.hugegraph.serializer.direct.struct.HugeType; -import com.baidu.hugegraph.structure.constant.Direction; -import com.baidu.hugegraph.util.E; -import com.baidu.hugegraph.util.IdUtil; - -/** - * Class used to format and parse id of edge, the edge id consists of: - * { source-vertex-id + edge-label + edge-name + target-vertex-id } - * NOTE: if we use `entry.type()` which is IN or OUT as a part of id, - * an edge's id will be different due to different directions (belongs - * to 2 owner vertex) - */ -public class EdgeId implements Id { - - private final Id ownerVertexId; - private final Direction direction; - private final Id edgeLabelId; - private final String sortValues; - private final Id otherVertexId; - - private final boolean directed; - private String cache; - - public EdgeId(Id ownerVertexId, Direction direction, Id edgeLabelId, - String sortValues, Id otherVertexId) { - this(ownerVertexId, direction, edgeLabelId, - sortValues, otherVertexId, false); - } - - public EdgeId(Id ownerVertexId, Direction direction, Id edgeLabelId, - String sortValues, Id otherVertexId, boolean directed) { - this.ownerVertexId = ownerVertexId; - this.direction = direction; - this.edgeLabelId = edgeLabelId; - this.sortValues = sortValues; - this.otherVertexId = otherVertexId; 
- this.directed = directed; - this.cache = null; - } - - private Id sourceVertexId() { - return this.direction == Direction.OUT ? - this.ownerVertexId : - this.otherVertexId; - } - - private Id targetVertexId() { - return this.direction == Direction.OUT ? - this.otherVertexId : - this.ownerVertexId; - } - - public Id ownerVertexId() { - return this.ownerVertexId; - } - - public Id edgeLabelId() { - return this.edgeLabelId; - } - - public Direction direction() { - return this.direction; - } - - public byte directionCode() { - return directionToCode(this.direction); - } - - public String sortValues() { - return this.sortValues; - } - - public Id otherVertexId() { - return this.otherVertexId; - } - - public String asString() { - if (this.cache != null) { - return this.cache; - } - if (this.directed) { - this.cache = SplicingIdGenerator.concat( - IdUtil.writeString(this.ownerVertexId), - this.direction.type().string(), - IdUtil.writeLong(this.edgeLabelId), - this.sortValues, - IdUtil.writeString(this.otherVertexId)); - } else { - this.cache = SplicingIdGenerator.concat( - IdUtil.writeString(this.sourceVertexId()), - IdUtil.writeLong(this.edgeLabelId), - this.sortValues, - IdUtil.writeString(this.targetVertexId())); - } - return this.cache; - } - - @Override - public byte[] asBytes() { - return StringEncoding.encode(this.asString()); - } - - @Override - public int length() { - return this.asString().length(); - } - - @Override - public IdType type() { - return IdType.EDGE; - } - - @Override - public int hashCode() { - if (this.directed) { - return this.ownerVertexId.hashCode() ^ - this.direction.hashCode() ^ - this.edgeLabelId.hashCode() ^ - this.sortValues.hashCode() ^ - this.otherVertexId.hashCode(); - } else { - return this.sourceVertexId().hashCode() ^ - this.edgeLabelId.hashCode() ^ - this.sortValues.hashCode() ^ - this.targetVertexId().hashCode(); - } - } - - @Override - public boolean equals(Object object) { - if (!(object instanceof EdgeId)) { - return false; - 
} - EdgeId other = (EdgeId) object; - if (this.directed) { - return this.ownerVertexId.equals(other.ownerVertexId) && - this.direction == other.direction && - this.edgeLabelId.equals(other.edgeLabelId) && - this.sortValues.equals(other.sortValues) && - this.otherVertexId.equals(other.otherVertexId); - } else { - return this.sourceVertexId().equals(other.sourceVertexId()) && - this.edgeLabelId.equals(other.edgeLabelId) && - this.sortValues.equals(other.sortValues) && - this.targetVertexId().equals(other.targetVertexId()); - } - } - - @Override - public String toString() { - return this.asString(); - } - - public static byte directionToCode(Direction direction) { - return direction.type().code(); - } - - public static EdgeId parse(String id) { - return parse(id, false); - } - - public static EdgeId parse(String id, boolean returnNullIfError) { - String[] idParts = SplicingIdGenerator.split(id); - if (!(idParts.length == 4 || idParts.length == 5)) { - if (returnNullIfError) { - return null; - } - throw new HugeException("EdgeId must format as 4~5 parts, but got %s parts: '%s'", - idParts.length, id); - } - try { - if (idParts.length == 4) { - Id ownerVertexId = IdUtil.readString(idParts[0]); - Id edgeLabelId = IdUtil.readLong(idParts[1]); - String sortValues = idParts[2]; - Id otherVertexId = IdUtil.readString(idParts[3]); - return new EdgeId(ownerVertexId, Direction.OUT, edgeLabelId, - sortValues, otherVertexId); - } else { - assert idParts.length == 5; - Id ownerVertexId = IdUtil.readString(idParts[0]); - HugeType direction = HugeType.fromString(idParts[1]); - Id edgeLabelId = IdUtil.readLong(idParts[2]); - String sortValues = idParts[3]; - Id otherVertexId = IdUtil.readString(idParts[4]); - return new EdgeId(ownerVertexId, Direction.convert(direction), - edgeLabelId, sortValues, otherVertexId); - } - } catch (Throwable e) { - if (returnNullIfError) { - return null; - } - throw new HugeException("Invalid format of edge id '%s'", e, id); - } - } - - public static Id 
parseStoredString(String id) { - String[] idParts = split(id); - E.checkArgument(idParts.length == 4, "Invalid id format: %s", id); - Id ownerVertexId = IdUtil.readStoredString(idParts[0]); - Id edgeLabelId = IdGenerator.ofStoredString(idParts[1], IdType.LONG); - String sortValues = idParts[2]; - Id otherVertexId = IdUtil.readStoredString(idParts[3]); - return new EdgeId(ownerVertexId, Direction.OUT, edgeLabelId, - sortValues, otherVertexId); - } - - public static String asStoredString(Id id) { - EdgeId eid = (EdgeId) id; - return SplicingIdGenerator.concat( - IdUtil.writeStoredString(eid.sourceVertexId()), - IdGenerator.asStoredString(eid.edgeLabelId()), - eid.sortValues(), - IdUtil.writeStoredString(eid.targetVertexId())); - } - - public static String[] split(String id) { - return SplicingIdGenerator.split(id); - } -} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java index 3cc7429f1..0aee0d4f4 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/Id.java @@ -21,21 +21,39 @@ import com.baidu.hugegraph.util.E; -public interface Id { +public interface Id extends Comparable { - int UUID_LENGTH = 16; + public static final int UUID_LENGTH = 16; - int length(); + public Object asObject(); - byte[] asBytes(); + public String asString(); - IdType type(); + public long asLong(); - default boolean edge() { + public byte[] asBytes(); + + public int length(); + + public IdType type(); + + public default boolean number() { + return this.type() == IdType.LONG; + } + + public default boolean uuid() { + return this.type() == IdType.UUID; + } + + public default boolean string() { + return this.type() == IdType.STRING; + } + + public default boolean edge() { return this.type() == IdType.EDGE; } - enum IdType { + public enum IdType { 
UNKNOWN, LONG, @@ -52,7 +70,7 @@ public char prefix() { public static IdType valueOfPrefix(String id) { E.checkArgument(id != null && id.length() > 0, - "Invalid id '%s'", id); + "Invalid id '%s'", id); switch (id.charAt(0)) { case 'L': return IdType.LONG; diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java new file mode 100644 index 000000000..df5a4e290 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java @@ -0,0 +1,421 @@ +package com.baidu.hugegraph.serializer.direct.util; + + +import com.baidu.hugegraph.util.E; +import com.baidu.hugegraph.util.LongEncoding; +import com.baidu.hugegraph.util.NumericUtil; +import com.baidu.hugegraph.serializer.direct.util.Id.IdType; + +import java.util.Objects; +import java.util.UUID; + +public abstract class IdGenerator { + + public static final Id ZERO = IdGenerator.of(0L); + + public final static Id of(String id) { + return new StringId(id); + } + + public final static Id of(UUID id) { + return new UuidId(id); + } + + public final static Id of(String id, boolean uuid) { + return uuid ? 
new UuidId(id) : new StringId(id); + } + + public final static Id of(long id) { + return new LongId(id); + } + + public static Id of(Object id) { + if (id instanceof Id) { + return (Id) id; + } else if (id instanceof String) { + return of((String) id); + } else if (id instanceof Number) { + return of(((Number) id).longValue()); + } else if (id instanceof UUID) { + return of((UUID) id); + } + return new ObjectId(id); + } + + public final static Id of(byte[] bytes, IdType type) { + switch (type) { + case LONG: + return new LongId(bytes); + case UUID: + return new UuidId(bytes); + case STRING: + return new StringId(bytes); + default: + throw new AssertionError("Invalid id type " + type); + } + } + + public final static Id ofStoredString(String id, IdType type) { + switch (type) { + case LONG: + return of(LongEncoding.decodeSignedB64(id)); + case UUID: + byte[] bytes = StringEncoding.decodeBase64(id); + return of(bytes, IdType.UUID); + case STRING: + return of(id); + default: + throw new AssertionError("Invalid id type " + type); + } + } + + public final static String asStoredString(Id id) { + switch (id.type()) { + case LONG: + return LongEncoding.encodeSignedB64(id.asLong()); + case UUID: + return StringEncoding.encodeBase64(id.asBytes()); + case STRING: + return id.asString(); + default: + throw new AssertionError("Invalid id type " + id.type()); + } + } + + public final static IdType idType(Id id) { + if (id instanceof LongId) { + return IdType.LONG; + } + if (id instanceof UuidId) { + return IdType.UUID; + } + if (id instanceof StringId) { + return IdType.STRING; + } + + return IdType.UNKNOWN; + } + + private final static int compareType(Id id1, Id id2) { + return idType(id1).ordinal() - idType(id2).ordinal(); + } + + /****************************** id defines ******************************/ + + public static final class StringId implements Id { + + private final String id; + + public StringId(String id) { + E.checkArgument(!id.isEmpty(), "The id can't be empty"); 
+ this.id = id; + } + + public StringId(byte[] bytes) { + this.id = StringEncoding.decode(bytes); + } + + @Override + public IdType type() { + return IdType.STRING; + } + + @Override + public Object asObject() { + return this.id; + } + + @Override + public String asString() { + return this.id; + } + + @Override + public long asLong() { + return Long.parseLong(this.id); + } + + @Override + public byte[] asBytes() { + return StringEncoding.encode(this.id); + } + + @Override + public int length() { + return this.id.length(); + } + + @Override + public int compareTo(Id other) { + int cmp = compareType(this, other); + if (cmp != 0) { + return cmp; + } + return this.id.compareTo(other.asString()); + } + + @Override + public int hashCode() { + return this.id.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof StringId)) { + return false; + } + return this.id.equals(((StringId) other).id); + } + + @Override + public String toString() { + return this.id; + } + } + + public static final class LongId extends Number implements Id { + + private static final long serialVersionUID = -7732461469037400190L; + + private final long id; + + public LongId(long id) { + this.id = id; + } + + public LongId(byte[] bytes) { + this.id = NumericUtil.bytesToLong(bytes); + } + + @Override + public IdType type() { + return IdType.LONG; + } + + @Override + public Object asObject() { + return this.id; + } + + @Override + public String asString() { + // TODO: encode with base64 + return Long.toString(this.id); + } + + @Override + public long asLong() { + return this.id; + } + + @Override + public byte[] asBytes() { + return NumericUtil.longToBytes(this.id); + } + + @Override + public int length() { + return Long.BYTES; + } + + @Override + public int compareTo(Id other) { + int cmp = compareType(this, other); + if (cmp != 0) { + return cmp; + } + return Long.compare(this.id, other.asLong()); + } + + @Override + public int hashCode() { + return 
Long.hashCode(this.id); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof Number)) { + return false; + } + return this.id == ((Number) other).longValue(); + } + + @Override + public String toString() { + return String.valueOf(this.id); + } + + @Override + public int intValue() { + return (int) this.id; + } + + @Override + public long longValue() { + return this.id; + } + + @Override + public float floatValue() { + return this.id; + } + + @Override + public double doubleValue() { + return this.id; + } + } + + public static final class UuidId implements Id { + + private final UUID uuid; + + public UuidId(String string) { + this(StringEncoding.uuid(string)); + } + + public UuidId(byte[] bytes) { + this(fromBytes(bytes)); + } + + public UuidId(UUID uuid) { + E.checkArgument(uuid != null, "The uuid can't be null"); + this.uuid = uuid; + } + + @Override + public IdType type() { + return IdType.UUID; + } + + @Override + public Object asObject() { + return this.uuid; + } + + @Override + public String asString() { + return this.uuid.toString(); + } + + @Override + public long asLong() { + throw new UnsupportedOperationException(); + } + + @Override + public byte[] asBytes() { + BytesBuffer buffer = BytesBuffer.allocate(16); + buffer.writeLong(this.uuid.getMostSignificantBits()); + buffer.writeLong(this.uuid.getLeastSignificantBits()); + return buffer.bytes(); + } + + private static UUID fromBytes(byte[] bytes) { + E.checkArgument(bytes != null, "The UUID can't be null"); + BytesBuffer buffer = BytesBuffer.wrap(bytes); + long high = buffer.readLong(); + long low = buffer.readLong(); + return new UUID(high, low); + } + + @Override + public int length() { + return UUID_LENGTH; + } + + @Override + public int compareTo(Id other) { + E.checkNotNull(other, "compare id"); + int cmp = compareType(this, other); + if (cmp != 0) { + return cmp; + } + return this.uuid.compareTo(((UuidId) other).uuid); + } + + @Override + public int hashCode() { + return 
this.uuid.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof UuidId)) { + return false; + } + return this.uuid.equals(((UuidId) other).uuid); + } + + @Override + public String toString() { + return this.uuid.toString(); + } + } + + /** + * This class is just used by backend store for wrapper object as Id + */ + private static final class ObjectId implements Id { + + private final Object object; + + public ObjectId(Object object) { + E.checkNotNull(object, "object"); + this.object = object; + } + + @Override + public IdType type() { + return IdType.UNKNOWN; + } + + @Override + public Object asObject() { + return this.object; + } + + @Override + public String asString() { + throw new UnsupportedOperationException(); + } + + @Override + public long asLong() { + throw new UnsupportedOperationException(); + } + + @Override + public byte[] asBytes() { + throw new UnsupportedOperationException(); + } + + @Override + public int length() { + throw new UnsupportedOperationException(); + } + + @Override + public int compareTo(Id o) { + throw new UnsupportedOperationException(); + } + + @Override + public int hashCode() { + return this.object.hashCode(); + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof ObjectId)) { + return false; + } + return Objects.equals(this.object, ((ObjectId) other).object); + } + + @Override + public String toString() { + return this.object.toString(); + } + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java new file mode 100644 index 000000000..266833d83 --- /dev/null +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java @@ -0,0 +1,74 @@ +package com.baidu.hugegraph.serializer.direct.util; + + +import net.jpountz.lz4.LZ4BlockOutputStream; +import net.jpountz.lz4.LZ4Compressor; +import 
net.jpountz.lz4.LZ4Factory; +import net.jpountz.lz4.LZ4FastDecompressor; +import net.jpountz.lz4.LZ4BlockInputStream; + +import java.io.ByteArrayInputStream; +import java.io.IOException; + +public class LZ4Util { + + protected static final float DEFAULT_BUFFER_RATIO = 1.5f; + + public static BytesBuffer compress(byte[] bytes, int blockSize) { + return compress(bytes, blockSize, DEFAULT_BUFFER_RATIO); + } + + public static BytesBuffer compress(byte[] bytes, int blockSize, + float bufferRatio) { + float ratio = bufferRatio <= 0.0F ? DEFAULT_BUFFER_RATIO : bufferRatio; + LZ4Factory factory = LZ4Factory.fastestInstance(); + LZ4Compressor compressor = factory.fastCompressor(); + int initBufferSize = Math.round(bytes.length / ratio); + BytesBuffer buf = new BytesBuffer(initBufferSize); + LZ4BlockOutputStream lz4Output = new LZ4BlockOutputStream( + buf, blockSize, compressor); + try { + lz4Output.write(bytes); + lz4Output.close(); + } catch (IOException e) { + throw new HugeException("Failed to compress", e); + } + /* + * If need to perform reading outside the method, + * remember to call forReadWritten() + */ + return buf; + } + + public static BytesBuffer decompress(byte[] bytes, int blockSize) { + return decompress(bytes, blockSize, DEFAULT_BUFFER_RATIO); + } + + public static BytesBuffer decompress(byte[] bytes, int blockSize, + float bufferRatio) { + float ratio = bufferRatio <= 0.0F ? 
DEFAULT_BUFFER_RATIO : bufferRatio; + LZ4Factory factory = LZ4Factory.fastestInstance(); + LZ4FastDecompressor decompressor = factory.fastDecompressor(); + ByteArrayInputStream bais = new ByteArrayInputStream(bytes); + int initBufferSize = Math.min(Math.round(bytes.length * ratio), + BytesBuffer.MAX_BUFFER_CAPACITY); + BytesBuffer buf = new BytesBuffer(initBufferSize); + LZ4BlockInputStream lzInput = new LZ4BlockInputStream(bais, + decompressor); + int count; + byte[] buffer = new byte[blockSize]; + try { + while ((count = lzInput.read(buffer)) != -1) { + buf.write(buffer, 0, count); + } + lzInput.close(); + } catch (IOException e) { + throw new HugeException("Failed to decompress", e); + } + /* + * If need to perform reading outside the method, + * remember to call forReadWritten() + */ + return buf; + } +} diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java index 53001cdaa..ab504fb15 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/SplicingIdGenerator.java @@ -22,8 +22,6 @@ import java.util.Arrays; import java.util.List; -import com.baidu.hugegraph.serializer.direct.struct.HugeVertex; -import com.baidu.hugegraph.structure.HugeVertex; import com.baidu.hugegraph.util.IdUtil; /** @@ -59,16 +57,16 @@ public static SplicingIdGenerator instance() { /** * Generate a string id of HugeVertex from Vertex name */ - public Id generate(HugeVertex vertex) { - /* - * Hash for row-key which will be evenly distributed. - * We can also use LongEncoding.encode() to encode the int/long hash - * if needed. 
- * id = String.format("%s%s%s", HashUtil.hash(id), ID_SPLITOR, id); - */ - // TODO: use binary Id with binary fields instead of string id - return splicing(vertex.schemaLabel().id().asString(), vertex.name()); - } +// public Id generate(HugeVertex vertex) { +// /* +// * Hash for row-key which will be evenly distributed. +// * We can also use LongEncoding.encode() to encode the int/long hash +// * if needed. +// * id = String.format("%s%s%s", HashUtil.hash(id), ID_SPLITOR, id); +// */ +// // TODO: use binary Id with binary fields instead of string id +// return splicing(vertex.schemaLabel().id().asString(), vertex.name()); +// } /** * Concat multiple ids into one composite id with IDS_SPLITOR diff --git a/pom.xml b/pom.xml index 0437b4de3..117e8f9e7 100644 --- a/pom.xml +++ b/pom.xml @@ -60,6 +60,7 @@ 2.2.1 3.1.2 2.12 + 2.2.3 From 9d8e0ed0f4d62aab1c50c8486fb258af9ae13f0b Mon Sep 17 00:00:00 2001 From: imbajin Date: Wed, 26 Oct 2022 19:30:27 +0800 Subject: [PATCH 3/9] Update pom.xml --- hugegraph-client/pom.xml | 3 --- 1 file changed, 3 deletions(-) diff --git a/hugegraph-client/pom.xml b/hugegraph-client/pom.xml index 762f81ad7..60d4594c9 100644 --- a/hugegraph-client/pom.xml +++ b/hugegraph-client/pom.xml @@ -38,19 +38,16 @@ hugegraph-common ${hugegraph.common.version} - org.lz4 lz4-java 1.4.0 - org.apache.hbase hbase-client ${hbase.version} - org.glassfish.jersey.containers jersey-container-servlet From 91554c9da32dc365727a20b1b685a2d900b7e65a Mon Sep 17 00:00:00 2001 From: JackyYangPassion Date: Wed, 26 Oct 2022 22:30:11 +0800 Subject: [PATCH 4/9] fix codestyle --- .../serializer/direct/RocksDBSerializer.java | 2 +- .../serializer/direct/util/DataTypeUtil.java | 332 ------------------ 2 files changed, 1 insertion(+), 333 deletions(-) delete mode 100644 hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java 
b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java index 46f07a722..05f3ea80b 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java @@ -26,5 +26,5 @@ * And they will be transferred to bytes directly **/ public class RocksDBSerializer { - + // TODO: Support write RocksDB directly } diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java deleted file mode 100644 index a597b09a9..000000000 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/DataTypeUtil.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * Copyright 2017 HugeGraph Authors - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with this - * work for additional information regarding copyright ownership. The ASF - * licenses this file to You under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. 
- */ - -package com.baidu.hugegraph.serializer.direct.util; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.List; -import java.util.Set; -import java.util.UUID; -// -//import com.baidu.hugegraph.loader.constant.Constants; -//import com.baidu.hugegraph.loader.source.AbstractSource; -//import com.baidu.hugegraph.loader.source.InputSource; -//import com.baidu.hugegraph.loader.source.file.FileSource; -//import com.baidu.hugegraph.loader.source.file.ListFormat; -//import com.baidu.hugegraph.structure.constant.Cardinality; -//import com.baidu.hugegraph.structure.constant.DataType; -//import com.baidu.hugegraph.structure.schema.PropertyKey; -//import com.baidu.hugegraph.util.DateUtil; -//import com.baidu.hugegraph.util.E; -//import com.baidu.hugegraph.util.InsertionOrderUtil; -//import com.baidu.hugegraph.util.ReflectionUtil; -//import com.google.common.base.Splitter; -//import com.google.common.collect.ImmutableSet; - -public final class DataTypeUtil { -// -// private static final Set ACCEPTABLE_TRUE = ImmutableSet.of( -// "true", "1", "yes", "y" -// ); -// private static final Set ACCEPTABLE_FALSE = ImmutableSet.of( -// "false", "0", "no", "n" -// ); -// -// public static boolean isSimpleValue(Object value) { -// if (value == null) { -// return false; -// } -// return ReflectionUtil.isSimpleType(value.getClass()); -// } -// -// public static Object convert(Object value, PropertyKey propertyKey, InputSource source) { -// E.checkArgumentNotNull(value, -// "The value to be converted can't be null"); -// -// String key = propertyKey.name(); -// DataType dataType = propertyKey.dataType(); -// Cardinality cardinality = propertyKey.cardinality(); -// switch (cardinality) { -// case SINGLE: -// return parseSingleValue(key, value, dataType, source); -// case SET: -// case LIST: -// return parseMultiValues(key, value, dataType, cardinality, source); -// default: -// throw new AssertionError(String.format( -// "Unsupported 
cardinality: '%s'", cardinality)); -// } -// } -// -// @SuppressWarnings("unchecked") -// public static List splitField(String key, Object rawColumnValue, InputSource source) { -// E.checkArgument(rawColumnValue != null, -// "The value to be split can't be null"); -// if (rawColumnValue instanceof Collection) { -// return (List) rawColumnValue; -// } -// // TODO: Seems a bit violent -// String rawValue = rawColumnValue.toString(); -// return split(key, rawValue, source); -// } -// -// public static long parseNumber(String key, Object rawValue) { -// if (rawValue instanceof Number) { -// return ((Number) rawValue).longValue(); -// } else if (rawValue instanceof String) { -// // trim() is a little time-consuming -// return parseLong(((String) rawValue).trim()); -// } -// throw new IllegalArgumentException(String.format( -// "The value(key='%s') must can be casted to Long, " + -// "but got '%s'(%s)", key, rawValue, rawValue.getClass().getName())); -// } -// -// public static UUID parseUUID(String key, Object rawValue) { -// if (rawValue instanceof UUID) { -// return (UUID) rawValue; -// } else if (rawValue instanceof String) { -// String value = ((String) rawValue).trim(); -// if (value.contains("-")) { -// return UUID.fromString(value); -// } -// // UUID represented by hex string -// E.checkArgument(value.length() == 32, "Invalid UUID value(key='%s') '%s'", key, value); -// String high = value.substring(0, 16); -// String low = value.substring(16); -// return new UUID(Long.parseUnsignedLong(high, 16), Long.parseUnsignedLong(low, 16)); -// } -// throw new IllegalArgumentException(String.format( -// "Failed to convert value(key='%s') '%s'(%s) to UUID", -// key, rawValue, rawValue.getClass())); -// } -// -// private static Object parseSingleValue(String key, Object rawValue, -// DataType dataType, -// InputSource source) { -// // Trim space if raw value is string -// Object value = rawValue; -// if (rawValue instanceof String) { -// value = ((String) rawValue).trim(); 
-// } -// if (dataType.isNumber()) { -// return parseNumber(key, value, dataType); -// } else if (dataType.isBoolean()) { -// return parseBoolean(key, value); -// } else if (dataType.isDate()) { -// E.checkState(source instanceof FileSource, -// "Only accept FileSource when convert String value " + -// "to Date, but got '%s'", source.getClass().getName()); -// String dateFormat = ((FileSource) source).dateFormat(); -// String timeZone = ((FileSource) source).timeZone(); -// return parseDate(key, value, dateFormat, timeZone); -// } else if (dataType.isUUID()) { -// return parseUUID(key, value); -// } -// E.checkArgument(checkDataType(key, value, dataType), -// "The value(key='%s') '%s'(%s) is not match with " + -// "data type %s and can't convert to it", -// key, value, value.getClass(), dataType); -// return value; -// } -// -// /** -// * collection format: "obj1,obj2,...,objn" or "[obj1,obj2,...,objn]" ..etc -// * TODO: After parsing to json, the order of the collection changed -// * in some cases (such as list) -// **/ -// private static Object parseMultiValues(String key, Object values, -// DataType dataType, -// Cardinality cardinality, -// InputSource source) { -// // JSON file should not parse again -// if (values instanceof Collection && -// checkCollectionDataType(key, (Collection) values, dataType)) { -// return values; -// } -// -// E.checkState(values instanceof String, -// "The value(key='%s') must be String type, " + -// "but got '%s'(%s)", key, values); -// String rawValue = (String) values; -// List valueColl = split(key, rawValue, source); -// Collection results = cardinality == Cardinality.LIST ? 
-// InsertionOrderUtil.newList() : -// InsertionOrderUtil.newSet(); -// valueColl.forEach(value -> { -// results.add(parseSingleValue(key, value, dataType, source)); -// }); -// E.checkArgument(checkCollectionDataType(key, results, dataType), -// "Not all collection elems %s match with data type %s", -// results, dataType); -// return results; -// } -// -// private static Boolean parseBoolean(String key, Object rawValue) { -// if (rawValue instanceof Boolean) { -// return (Boolean) rawValue; -// } -// if (rawValue instanceof String) { -// String value = ((String) rawValue).toLowerCase(); -// if (ACCEPTABLE_TRUE.contains(value)) { -// return true; -// } else if (ACCEPTABLE_FALSE.contains(value)) { -// return false; -// } else { -// throw new IllegalArgumentException(String.format( -// "Failed to convert '%s'(key='%s') to Boolean, " + -// "the acceptable boolean strings are %s or %s", -// key, rawValue, ACCEPTABLE_TRUE, ACCEPTABLE_FALSE)); -// } -// } -// throw new IllegalArgumentException(String.format( -// "Failed to convert value(key='%s') '%s'(%s) to Boolean", -// key, rawValue, rawValue.getClass())); -// } -// -// private static Number parseNumber(String key, Object value, -// DataType dataType) { -// E.checkState(dataType.isNumber(), -// "The target data type must be number"); -// -// if (dataType.clazz().isInstance(value)) { -// return (Number) value; -// } -// try { -// switch (dataType) { -// case BYTE: -// return Byte.valueOf(value.toString()); -// case INT: -// return Integer.valueOf(value.toString()); -// case LONG: -// return parseLong(value.toString()); -// case FLOAT: -// return Float.valueOf(value.toString()); -// case DOUBLE: -// return Double.valueOf(value.toString()); -// default: -// throw new AssertionError(String.format( -// "Number type only contains Byte, Integer, " + -// "Long, Float, Double, but got %s", -// dataType.clazz())); -// } -// } catch (NumberFormatException e) { -// throw new IllegalArgumentException(String.format( -// "Failed to 
convert value(key=%s) '%s'(%s) to Number", -// key, value, value.getClass()), e); -// } -// } -// -// private static long parseLong(String rawValue) { -// if (rawValue.startsWith("-")) { -// return Long.parseLong(rawValue); -// } else { -// return Long.parseUnsignedLong(rawValue); -// } -// } -// -// private static Date parseDate(String key, Object value, -// String dateFormat, String timeZone) { -// if (value instanceof Date) { -// return (Date) value; -// } -// if (value instanceof Number) { -// return new Date(((Number) value).longValue()); -// } else if (value instanceof String) { -// if (Constants.TIMESTAMP.equals(dateFormat)) { -// try { -// long timestamp = Long.parseLong((String) value); -// return new Date(timestamp); -// } catch (NumberFormatException e) { -// throw new IllegalArgumentException(String.format( -// "Invalid timestamp value '%s'", value)); -// } -// } else { -// return DateUtil.parse((String) value, dateFormat, timeZone); -// } -// } -// throw new IllegalArgumentException(String.format( -// "Failed to convert value(key='%s') '%s'(%s) to Date", -// key, value, value.getClass())); -// } -// -// private static List split(String key, String rawValue, -// InputSource source) { -// List valueColl = new ArrayList<>(); -// if (rawValue.isEmpty()) { -// return valueColl; -// } -// E.checkState(AbstractSource.class.isAssignableFrom(source.getClass()), -// "Only accept AbstractSource when parse multi values, " + -// "but got '%s'", source.getClass().getName()); -// ListFormat listFormat = ((AbstractSource) source).listFormat(); -// E.checkArgumentNotNull(listFormat, "The list_format must be set when " + -// "parse list or set values"); -// -// String startSymbol = listFormat.startSymbol(); -// String endSymbol = listFormat.endSymbol(); -// E.checkArgument(rawValue.length() >= -// startSymbol.length() + endSymbol.length(), -// "The value(key='%s') '%s' length(%s) must be >= " + -// "start symbol '%s' + end symbol '%s' length", -// key, rawValue, 
rawValue.length(), -// startSymbol, endSymbol); -// E.checkArgument(rawValue.startsWith(startSymbol) && -// rawValue.endsWith(endSymbol), -// "The value(key='%s') must start with '%s' and " + -// "end with '%s', but got '%s'", -// key, startSymbol, endSymbol, rawValue); -// rawValue = rawValue.substring(startSymbol.length(), -// rawValue.length() - endSymbol.length()); -// String elemDelimiter = listFormat.elemDelimiter(); -// Splitter.on(elemDelimiter).split(rawValue).forEach(value -> { -// if (!listFormat.ignoredElems().contains(value)) { -// valueColl.add(value); -// } -// }); -// return valueColl; -// } -// -// /** -// * Check type of the value valid -// */ -// private static boolean checkDataType(String key, Object value, -// DataType dataType) { -// if (value instanceof Number) { -// return parseNumber(key, value, dataType) != null; -// } -// return dataType.clazz().isInstance(value); -// } -// -// /** -// * Check type of all the values(may be some of list properties) valid -// */ -// private static boolean checkCollectionDataType(String key, -// Collection values, -// DataType dataType) { -// for (Object value : values) { -// if (!checkDataType(key, value, dataType)) { -// return false; -// } -// } -// return true; -// } -} From 4c01ca8e510f11e1636653ae770790c26a013cf5 Mon Sep 17 00:00:00 2001 From: imbajin Date: Wed, 26 Oct 2022 22:44:34 +0800 Subject: [PATCH 5/9] Update HBaseSerializer.java --- .../baidu/hugegraph/serializer/direct/HBaseSerializer.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java index 82e6c3372..dcb796c73 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java @@ -13,6 +13,9 @@ import 
java.util.Arrays; import java.util.Map; +/** + * TODO: review later + */ public class HBaseSerializer { private int edgeLogicPartitions = 30; @@ -41,7 +44,7 @@ public byte[] getKeyBytes(GraphElement e) { buffer.writeShort(getPartition(HugeType.EDGE, IdGenerator.of(edge.sourceId()))); buffer.writeId(IdGenerator.of(edge.sourceId())); buffer.write(HugeType.EDGE_OUT.code()); - buffer.writeId(IdGenerator.of(graphSchema.getEdgeLabel(e.label()).id()));//出现错误 + buffer.writeId(IdGenerator.of(graphSchema.getEdgeLabel(e.label()).id())); //出现错误 buffer.writeStringWithEnding(""); buffer.writeId(IdGenerator.of(edge.targetId())); array = buffer.bytes(); @@ -52,7 +55,7 @@ public byte[] getKeyBytes(GraphElement e) { public byte[] getValueBytes(GraphElement e) { byte[] array = null; if(e.type() == "vertex"){ - int propsCount = e.properties().size() ;//vertex.sizeOfProperties(); + int propsCount = e.properties().size() ; //vertex.sizeOfProperties(); BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount); buffer.writeId(IdGenerator.of(graphSchema.getVertexLabel(e.label()).id())); buffer.writeVInt(propsCount); From db618c095425fb037e10766aaa0289b4c394787f Mon Sep 17 00:00:00 2001 From: JackyYangPassion Date: Mon, 31 Oct 2022 15:22:44 +0800 Subject: [PATCH 6/9] add license --- .../serializer/direct/HBaseSerializer.java | 19 +++++++++++++++++++ .../serializer/direct/reuse/BytesDemo.java | 19 +++++++++++++++++++ .../serializer/direct/util/IdGenerator.java | 19 +++++++++++++++++++ .../serializer/direct/util/LZ4Util.java | 19 +++++++++++++++++++ 4 files changed, 76 insertions(+) diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java index 82e6c3372..55f17082f 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java +++ 
b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/HBaseSerializer.java @@ -1,3 +1,22 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + package com.baidu.hugegraph.serializer.direct; import com.baidu.hugegraph.driver.HugeClient; diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java index 1d29ea689..23625b4ae 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java @@ -1,3 +1,22 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + package com.baidu.hugegraph.serializer.direct.reuse; import java.io.IOException; diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java index df5a4e290..3e6c1e966 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/IdGenerator.java @@ -1,3 +1,22 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + package com.baidu.hugegraph.serializer.direct.util; diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java index 266833d83..32684369c 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/LZ4Util.java @@ -1,3 +1,22 @@ +/* + * Copyright 2017 HugeGraph Authors + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + package com.baidu.hugegraph.serializer.direct.util; From a248d5667d6f1641f63dae14083cf96cf7ca33d1 Mon Sep 17 00:00:00 2001 From: JackyYangPassion Date: Mon, 31 Oct 2022 15:35:07 +0800 Subject: [PATCH 7/9] fix license duplicated --- .../serializer/direct/util/StringEncoding.java | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java index efe3340de..5668f14af 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/util/StringEncoding.java @@ -16,19 +16,6 @@ * License for the specific language governing permissions and limitations * under the License. */ -// Copyright 2017 JanusGraph Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
package com.baidu.hugegraph.serializer.direct.util; From af4cef142a9836ab8d71b31812a6bad4189ee239 Mon Sep 17 00:00:00 2001 From: imbajin Date: Tue, 1 Nov 2022 17:28:14 +0800 Subject: [PATCH 8/9] Update RocksDBSerializer.java --- .../baidu/hugegraph/serializer/direct/RocksDBSerializer.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java index 05f3ea80b..de964f3cb 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/RocksDBSerializer.java @@ -20,6 +20,8 @@ package com.baidu.hugegraph.serializer.direct; /** + * TODO: review later + * * In this serializer, we only support normal type now: * - number * - string From e7f4dab487024ceb666e12e3fa44b26696cf72fd Mon Sep 17 00:00:00 2001 From: JackyYangPassion Date: Wed, 2 Nov 2022 14:30:19 +0800 Subject: [PATCH 9/9] rm HBase pom --- hugegraph-client/pom.xml | 5 --- .../serializer/direct/reuse/BytesDemo.java | 34 ++----------------- pom.xml | 1 - 3 files changed, 2 insertions(+), 38 deletions(-) diff --git a/hugegraph-client/pom.xml b/hugegraph-client/pom.xml index 60d4594c9..99505558b 100644 --- a/hugegraph-client/pom.xml +++ b/hugegraph-client/pom.xml @@ -43,11 +43,6 @@ lz4-java 1.4.0 - - org.apache.hbase - hbase-client - ${hbase.version} - org.glassfish.jersey.containers jersey-container-servlet diff --git a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java index 23625b4ae..dc3d99678 100644 --- a/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java +++ b/hugegraph-client/src/main/java/com/baidu/hugegraph/serializer/direct/reuse/BytesDemo.java @@ 
-30,18 +30,10 @@ import com.baidu.hugegraph.serializer.direct.RocksDBSerializer; import com.baidu.hugegraph.structure.graph.Edge; import com.baidu.hugegraph.structure.graph.Vertex; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.Put; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.util.Bytes; /** * @author jin - * This class is a demo for rocksdb put(rowkey, values) which use Client-Side's graph struct + * This class is a demo for rocksdb/HBase put(rowkey, values) which use Client-Side's graph struct * And we don't need to construct the graph element, just use it and transfer them to bytes array * instead of json format */ @@ -195,29 +187,7 @@ boolean sendRpcToHBase(String type, byte[] rowkey, byte[] values) { boolean put (String type, byte[] rowkey, byte[] values) throws IOException { - Configuration config = HBaseConfiguration.create(); - // Our json records sometimes are very big, we have - // disable the maxsize check on the keyvalue. - config.set("hbase.zookeeper.quorum", "localhost"); - config.set("hbase.zookeeper.property.clientPort", "2181"); - - - Connection conn = ConnectionFactory.createConnection(config); - Table htable = null ; - if (type.equals("vertex")) { - htable = conn.getTable(TableName.valueOf("hugegraph12p:g_v")); - } else if (type.equals("edge")) { - htable = conn.getTable(TableName.valueOf("hugegraph12p:g_oe")); - } - - Put put = new Put(rowkey); - put.addColumn(Bytes.toBytes("f"), - Bytes.toBytes(""), - values); - htable.put(put); - htable.close(); - - + // TODO: put to HBase return true; } diff --git a/pom.xml b/pom.xml index dec3d8044..d729ecb14 100644 --- a/pom.xml +++ b/pom.xml @@ -60,7 +60,6 @@ 2.2.1 3.1.2 2.12 - 2.2.3 3.0.0 true true