@@ -26,14 +26,17 @@
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.parsers.ParseException;

import javax.annotation.Nullable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
@@ -70,18 +73,32 @@ public SchemaRegistryBasedAvroBytesDecoder(
@Override
public GenericRecord parse(ByteBuffer bytes)
{
int length = bytes.limit() - 1 - 4;
if (length < 0) {
  throw new ParseException("Failed to decode avro message, not enough bytes to decode (%s)", bytes.limit());
}

bytes.get(); // ignore first \0 byte
int id = bytes.getInt(); // extract schema registry id
int offset = bytes.position() + bytes.arrayOffset();
Schema schema;

try {
  ParsedSchema parsedSchema = registry.getSchemaById(id);
  schema = parsedSchema instanceof AvroSchema ? ((AvroSchema) parsedSchema).rawSchema() : null;
}
catch (IOException | RestClientException ex) {
  throw new RE(ex, "Failed to get Avro schema: %s", id);
}
if (schema == null) {
  throw new RE("Failed to find Avro schema: %s", id);
}
DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
try {
  return reader.read(null, DecoderFactory.get().binaryDecoder(bytes.array(), offset, length, null));
}
catch (Exception e) {
  throw new ParseException(e, "Fail to decode Avro message for schema: %s!", id);
}
}
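For context: parse() expects Confluent's Schema Registry wire format, a single magic byte (0x00) followed by a 4-byte big-endian schema id and then the Avro-encoded payload, which is why the payload length is computed as limit() - 1 - 4. The following is a minimal sketch of building such a message, mirroring what the unit tests further down do; the wireFormat helper and the schemaId argument are illustrative only, not part of this patch.

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

static ByteBuffer wireFormat(int schemaId, Schema schema, GenericRecord record) throws Exception
{
  // Avro-encode the record body.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
  new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
  encoder.flush();
  byte[] body = out.toByteArray();

  // Magic byte (0x00) + 4-byte schema id + Avro payload, as parse() expects.
  ByteBuffer bb = ByteBuffer.allocate(1 + 4 + body.length).put((byte) 0).putInt(schemaId).put(body);
  bb.rewind();
  return bb;
}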

@@ -20,6 +20,7 @@
package org.apache.druid.data.input.avro;

import com.fasterxml.jackson.databind.ObjectMapper;
import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.avro.Schema;
@@ -29,6 +30,7 @@
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.junit.Assert;
import org.junit.Before;
@@ -96,40 +98,60 @@ public void testConfig() throws Exception
public void testParse() throws Exception
{
// Given
Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234)))
       .thenReturn(new AvroSchema(SomeAvroDatum.getClassSchema()));
GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
Schema schema = SomeAvroDatum.getClassSchema();
byte[] bytes = getAvroDatum(schema, someAvroDatum);
ByteBuffer bb = ByteBuffer.allocate(bytes.length + 5).put((byte) 0).putInt(1234).put(bytes);
bb.rewind();
// When
new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}

@Test(expected = ParseException.class)
public void testParseCorruptedNotEnoughBytesToEvenGetSchemaInfo()
{
// Given
ByteBuffer bb = ByteBuffer.allocate(2).put((byte) 0).put(1, (byte) 1);
bb.rewind();
// When
new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}

@Test(expected = ParseException.class)
public void testParseCorruptedPartial() throws Exception
{
// Given
Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234)))
       .thenReturn(new AvroSchema(SomeAvroDatum.getClassSchema()));
GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
Schema schema = SomeAvroDatum.getClassSchema();
byte[] bytes = getAvroDatum(schema, someAvroDatum);
ByteBuffer bb = ByteBuffer.allocate(4 + 5).put((byte) 0).putInt(1234).put(bytes, 5, 4);
bb.rewind();
// When
new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}

@Test(expected = RE.class)
public void testParseWrongSchemaType() throws Exception
{
// Given
Mockito.when(registry.getSchemaById(ArgumentMatchers.eq(1234))).thenReturn(Mockito.mock(ParsedSchema.class));
ByteBuffer bb = ByteBuffer.allocate(5).put((byte) 0).putInt(1234);
bb.rewind();
// When
new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}

@Test(expected = RE.class)
public void testParseWrongId() throws Exception
{
// Given
Mockito.when(registry.getSchemaById(ArgumentMatchers.anyInt())).thenThrow(new IOException("no pasaran"));
GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
Schema schema = SomeAvroDatum.getClassSchema();
byte[] bytes = getAvroDatum(schema, someAvroDatum);
ByteBuffer bb = ByteBuffer.allocate(5).put((byte) 0).putInt(1234);
bb.rewind();
// When
new SchemaRegistryBasedAvroBytesDecoder(registry).parse(bb);
}
108 changes: 67 additions & 41 deletions integration-tests/docker/docker-compose.base.yml
@@ -34,7 +34,7 @@ networks:
- subnet: 172.172.172.0/24

services:
### always there supporting infra:
druid-zookeeper-kafka:
image: druid/cluster
container_name: druid-zookeeper-kafka
@@ -71,45 +71,6 @@
env_file:
- ./environment-configs/common

### overlords
druid-overlord:
@@ -357,12 +318,54 @@
- ./environment-configs/common
- ./environment-configs/router-custom-check-tls

### optional supporting infra
druid-it-hadoop:
image: druid-it/hadoop:2.8.5
container_name: druid-it-hadoop
ports:
- 2049:2049
- 2122:2122
- 8020:8020
- 8021:8021
- 8030:8030
- 8031:8031
- 8032:8032
- 8033:8033
- 8040:8040
- 8042:8042
- 8088:8088
- 8443:8443
- 9000:9000
- 10020:10020
- 19888:19888
- 34455:34455
- 50010:50010
- 50020:50020
- 50030:50030
- 50060:50060
- 50070:50070
- 50075:50075
- 50090:50090
- 51111:51111
networks:
druid-it-net:
ipv4_address: 172.172.172.101
privileged: true
volumes:
- ${HOME}/shared:/shared
- ./../src/test/resources:/resources
hostname: "druid-it-hadoop"
command: "bash -c 'echo Start druid-it-hadoop container... && \
/etc/bootstrap.sh && \
tail -f /dev/null'"


druid-openldap:
image: osixia/openldap:1.4.0
container_name: druid-openldap
networks:
druid-it-net:
ipv4_address: 172.172.172.102
ports:
- 8389:389
- 8636:636
@@ -373,3 +376,26 @@
env_file:
- ./environment-configs/common
command: --copy-service


schema-registry:
image: confluentinc/cp-schema-registry:5.5.1
container_name: schema-registry
ports:
- 8085:8085
networks:
druid-it-net:
ipv4_address: 172.172.172.103
volumes:
- ${HOME}/shared:/shared
- ./schema-registry/jaas_config.file:/usr/lib/druid/conf/jaas_config.file
- ./schema-registry/password-file:/usr/lib/druid/conf/password-file
privileged: true
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:8085"
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: druid-zookeeper-kafka:9092
SCHEMA_REGISTRY_AUTHENTICATION_METHOD: BASIC
SCHEMA_REGISTRY_AUTHENTICATION_REALM: druid
SCHEMA_REGISTRY_AUTHENTICATION_ROLES: users
SCHEMA_REGISTRY_OPTS: -Djava.security.auth.login.config=/usr/lib/druid/conf/jaas_config.file
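
The schema-registry service above enables HTTP basic authentication (realm druid, role users; the credentials come from the password file added below). A minimal sketch, assuming the Confluent client's standard basic-auth properties, of how a client such as the CachedSchemaRegistryClient used by the decoder could connect to this registry; the class name, cache capacity, and schema id are illustrative only.

import com.google.common.collect.ImmutableMap;
import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;

public class SchemaRegistryBasicAuthExample
{
  public static ParsedSchema fetchSchema(int schemaId) throws Exception
  {
    // Credentials mirror docker/schema-registry/password-file ("druid: diurd,users").
    SchemaRegistryClient client = new CachedSchemaRegistryClient(
        "http://schema-registry:8085",
        Integer.MAX_VALUE, // identity-map capacity; illustrative
        ImmutableMap.of(
            "basic.auth.credentials.source", "USER_INFO",
            "basic.auth.user.info", "druid:diurd"
        )
    );
    return client.getSchemaById(schemaId);
  }
}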
@@ -0,0 +1,29 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

version: "2.2"
services:
schema-registry:
extends:
file: docker-compose.base.yml
service: schema-registry
depends_on:
- druid-zookeeper-kafka
links:
- druid-zookeeper-kafka:druid-zookeeper-kafka
- druid-coordinator:druid-coordinator
- druid-broker:druid-broker
- druid-historical:druid-historical
- druid-indexer:druid-indexer
29 changes: 29 additions & 0 deletions integration-tests/docker/docker-compose.schema-registry.yml
@@ -0,0 +1,29 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

version: "2.2"
services:
schema-registry:
extends:
file: docker-compose.base.yml
service: schema-registry
depends_on:
- druid-zookeeper-kafka
links:
- druid-zookeeper-kafka:druid-zookeeper-kafka
- druid-coordinator:druid-coordinator
- druid-broker:druid-broker
- druid-middlemanager:druid-middlemanager
- druid-historical:druid-historical
5 changes: 5 additions & 0 deletions integration-tests/docker/schema-registry/jaas_config.file
@@ -0,0 +1,5 @@
druid {
org.eclipse.jetty.jaas.spi.PropertyFileLoginModule required
file="/usr/lib/druid/conf/password-file"
debug="true";
};
1 change: 1 addition & 0 deletions integration-tests/docker/schema-registry/password-file
@@ -0,0 +1 @@
druid: diurd,users