Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 7 additions & 9 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -442,10 +442,6 @@ jobs:
docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
done

- <<: *integration_batch_index
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'

- &integration_input_format
name: "(Compile=openjdk8, Run=openjdk8) input format integration test"
stage: Tests - phase 2
Expand Down Expand Up @@ -689,11 +685,13 @@ jobs:
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis Catalog

# Disabling BatchIndex test as it is failing due to a timeout; fixing it will be taken up in a separate PR.
#- <<: *integration_tests_ex
# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
# env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
# script: ./it.sh travis BatchIndex
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
script: ./it.sh travis BatchIndex

# END - Integration tests for Compile with Java 8 and Run with Java 8

Expand Down
2 changes: 2 additions & 0 deletions distribution/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,8 @@
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-multi-stage-query</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-catalog</argument>
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is this change related to this PR? If not, then should this be raised as a separate PR?

Copy link
Copy Markdown
Contributor Author

@abhagraw abhagraw Nov 18, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The docker containers created for the test fail with the following error, if this extension is not added -

2022-11-15T06:35:58,107 INFO [main] org.hibernate.validator.internal.util.Version - HV000001: Hibernate Validator 5.2.5.Final
Exception in thread "main" org.apache.druid.java.util.common.ISE: Extension [/usr/local/apache-druid-2022.12.0-iap-SNAPSHOT/extensions/druid-catalog] specified in "druid.extensions.loadList" didn't exist!?
	at org.apache.druid.guice.ExtensionsLoader.getExtensionFilesToLoad(ExtensionsLoader.java:183)
	at org.apache.druid.guice.ExtensionsLoader$ServiceLoadingFromExtensions.addAllFromFileSystem(ExtensionsLoader.java:292)
	at org.apache.druid.guice.ExtensionsLoader$ServiceLoadingFromExtensions.<init>(ExtensionsLoader.java:280)
	at org.apache.druid.guice.ExtensionsLoader$ServiceLoadingFromExtensions.<init>(ExtensionsLoader.java:268)
	at org.apache.druid.guice.ExtensionsLoader.lambda$getFromExtensions$0(ExtensionsLoader.java:139)
	at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
	at org.apache.druid.guice.ExtensionsLoader.getFromExtensions(ExtensionsLoader.java:137)
	at org.apache.druid.cli.Main.main(Main.java:98)```

<argument>-c</argument>
<argument>org.apache.druid.extensions:druid-protobuf-extensions</argument>
<argument>-c</argument>
<argument>org.apache.druid.extensions:mysql-metadata-storage</argument>
Expand Down
19 changes: 12 additions & 7 deletions integration-tests-ex/cases/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@
<groupId>org.jdbi</groupId>
<artifactId>jdbi</artifactId>
</dependency>
<dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>mysql-metadata-storage</artifactId>
<version>${project.parent.version}</version>
Expand Down Expand Up @@ -218,10 +218,15 @@
<artifactId>JUnitParams</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</dependency>
<dependency>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
<version>5.4.0</version>
</dependency>
</dependencies>

<!-- Exclude ITs from surefire. Required because they end with "Test". -->
Expand Down Expand Up @@ -345,8 +350,8 @@
<goal>integration-test</goal>
</goals>
<configuration>
<!-- our tests are very verbose, let's keep the volume down -->
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<!-- Turning on logs so that Travis does not time out tests for not providing any output. -->
<redirectTestOutputToFile>false</redirectTestOutputToFile>
Comment thread
abhagraw marked this conversation as resolved.
<!-- Can run only one test category per Maven run. -->
<groups>org.apache.druid.testsEx.categories.${it.category}</groups>
</configuration>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.testsEx.msq;

import com.google.inject.Inject;
import org.apache.commons.io.IOUtils;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.testing.utils.DataLoaderHelper;
import org.apache.druid.testing.utils.MsqTestQueryHelper;
import org.apache.druid.testing.utils.TestQueryHelper;
import org.apache.druid.testsEx.indexer.AbstractITBatchIndexTest;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

/**
 * Base class for SQL-based (MSQ) ingestion integration tests: loads a SQL ingestion
 * spec from a resource file, submits it as an MSQ task, waits for the datasource to
 * become queryable, then runs native verification queries against it.
 */
public class AbstractITSQLBasedIngestion
{
  // Logger tagged with this class. (The original tagged it with TestQueryHelper.class,
  // which was a copy-paste slip and mislabeled the log output.)
  public static final Logger LOG = new Logger(AbstractITSQLBasedIngestion.class);
  @Inject
  private MsqTestQueryHelper msqHelper;

  @Inject
  protected TestQueryHelper queryHelper;

  @Inject
  private DataLoaderHelper dataLoaderHelper;

  /**
   * Reads the given classpath resource as a UTF-8 string and replaces every
   * %%DATASOURCE%% token with the provided datasource name.
   *
   * @param filePath   classpath-relative path of the resource file
   * @param datasource value substituted for %%DATASOURCE%%
   * @return the file contents with all %%DATASOURCE%% tokens replaced
   * @throws ISE if the resource is missing or cannot be read
   */
  protected String getStringFromFileAndReplaceDatasource(String filePath, String datasource)
  {
    String fileString;
    // try-with-resources: the original leaked the InputStream on every call.
    try (InputStream is = AbstractITBatchIndexTest.class.getResourceAsStream(filePath)) {
      if (is == null) {
        // getResourceAsStream returns null for a missing resource; fail with a clear
        // message instead of the NPE that IOUtils.toString would otherwise throw.
        throw new ISE("could not find query file: %s", filePath);
      }
      fileString = IOUtils.toString(is, StandardCharsets.UTF_8);
    }
    catch (IOException e) {
      throw new ISE(e, "could not read query file: %s", filePath);
    }

    return StringUtils.replace(
        fileString,
        "%%DATASOURCE%%",
        datasource
    );
  }

  /**
   * Reads native queries from a file and runs them against the provided datasource.
   *
   * @param queryFilePath classpath-relative path of the native query file
   * @param dataSource    datasource substituted for %%DATASOURCE%% in the queries
   * @throws RuntimeException wrapping any failure from query execution
   */
  protected void doTestQuery(String queryFilePath, String dataSource)
  {
    try {
      String query = getStringFromFileAndReplaceDatasource(queryFilePath, dataSource);
      queryHelper.testQueriesFromString(query);
    }
    catch (Exception e) {
      LOG.error(e, "Error while running test query");
      throw new RuntimeException(e);
    }
  }

  /**
   * Submits a SQL task, waits for task completion and datasource availability, and
   * then runs the test queries on the ingested datasource.
   *
   * @param sqlTask       SQL ingestion statement to submit as an MSQ task
   * @param queryFilePath classpath-relative path of the native verification queries
   * @param datasource    datasource the task ingests into
   * @param msqContext    context parameters passed with the MSQ API call
   */
  protected void submitTaskAnddoTestQuery(String sqlTask, String queryFilePath, String datasource,
                                          Map<String, Object> msqContext) throws Exception
  {
    LOG.info("SqlTask - \n %s", sqlTask);

    // Submit the task and wait for the datasource to get loaded
    msqHelper.submitMsqTaskAndWaitForCompletion(
        sqlTask,
        msqContext
    );

    dataLoaderHelper.waitUntilDatasourceIsReady(datasource);
    doTestQuery(queryFilePath, datasource);
  }

  /**
   * Runs an MSQ ingestion SQL test end to end.
   *
   * @param sqlFilePath   path of file containing the sql query.
   * @param queryFilePath path of file containing the native test queries to be run on the ingested datasource.
   * @param datasource    name of the datasource. %%DATASOURCE%% in the sql and queries will be replaced with this value.
   * @param msqContext    context parameters to be passed with MSQ API call.
   */
  protected void runMSQTaskandTestQueries(String sqlFilePath, String queryFilePath, String datasource,
                                          Map<String, Object> msqContext) throws Exception
  {
    LOG.info("Starting MSQ test for [%s, %s]", sqlFilePath, queryFilePath);

    String sqlTask = getStringFromFileAndReplaceDatasource(sqlFilePath, datasource);
    submitTaskAnddoTestQuery(sqlTask, queryFilePath, datasource, msqContext);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.testsEx.msq;

import junitparams.Parameters;
import org.apache.commons.io.FilenameUtils;
import org.apache.curator.shaded.com.google.common.collect.ImmutableMap;
import org.apache.druid.testsEx.categories.MultiStageQuery;
import org.apache.druid.testsEx.config.DruidTestRunner;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.util.Arrays;
import java.util.List;

@RunWith(DruidTestRunner.class)
@Category(MultiStageQuery.class)
public class ITSQLBasedBatchIngestion extends AbstractITSQLBasedIngestion
{
private static final String BATCH_INDEX_TASKS_DIR = "/multi-stage-query/batch-index/";

public static List<List<String>> test_cases()
{
return Arrays.asList(
Arrays.asList("msq_inline.sql", "json_path_index_queries.json"),
Arrays.asList("sparse_column_msq.sql", "sparse_column_msq.json"),
Arrays.asList("wikipedia_http_inputsource_msq.sql", "wikipedia_http_inputsource_queries.json"),
Arrays.asList("wikipedia_index_msq.sql", "wikipedia_index_queries.json"),
Arrays.asList("wikipedia_merge_index_task.sql", "wikipedia_index_queries.json"),
Arrays.asList("wikipedia_index_task_with_transform.sql", "wikipedia_index_queries_with_transform.json")
);

}

@Test
@Parameters(method = "test_cases")
public void testSQLBasedBatchIngestion(String sqlFileName, String queryFileName)
{
try {
runMSQTaskandTestQueries(BATCH_INDEX_TASKS_DIR + sqlFileName,
BATCH_INDEX_TASKS_DIR + queryFileName,
FilenameUtils.removeExtension(sqlFileName),
ImmutableMap.of("finalizeAggregations", false,
"maxNumTasks", 5,
"groupByEnableMultiValueUnnesting", false
));
}
catch (Exception e) {
Comment thread
abhagraw marked this conversation as resolved.
LOG.error(e, "Error while testing [%s, %s]", sqlFileName, queryFileName);
throw new RuntimeException(e);
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
[
{
"description": "timeseries",
"query": {
"queryType": "timeseries",
"dataSource": "%%DATASOURCE%%",
"intervals": [
"1000/3000"
],
"aggregations": [
{
"type": "longSum",
"name": "len",
"fieldName": "len"
},
{
"type": "longSum",
"name": "max",
"fieldName": "max"
},
{
"type": "longSum",
"name": "min",
"fieldName": "min"
},
{
"type": "longSum",
"name": "sum",
"fieldName": "sum"
}
],
"granularity": {
"type": "all"
}
},
"expectedResults": [
{
"timestamp": "2013-08-31T01:02:33.000Z",
"result": {
"sum": 10,
"min": 0,
"len": 5,
"max": 4
}
}
]
}
]

Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
-- MSQ ingestion spec for the json_path_index test: ingests a single inline JSON row
-- whose "values" array is flattened into len/min/max/sum columns via JSONPath
-- expressions in the EXTERN flattenSpec.
-- %%DATASOURCE%% is replaced by the test harness before submission.
REPLACE INTO "%%DATASOURCE%%" OVERWRITE ALL
WITH "source" AS (SELECT * FROM TABLE(
EXTERN(
'{"type":"inline","data":"{\"timestamp\": \"2013-08-31T01:02:33Z\", \"values\": [0,1,2,3,4] }"}',
'{"type":"json","flattenSpec":{"useFieldDiscovery":true,"fields":[{"type":"path","name":"len","expr":"$.values.length()"},{"type":"path","name":"min","expr":"$.values.min()"},{"type":"path","name":"max","expr":"$.values.max()"},{"type":"path","name":"sum","expr":"$.values.sum()"}]}}',
'[{"name":"timestamp","type":"long"},{"name":"len","type":"long"},{"name":"min","type":"long"},{"name":"max","type":"long"},{"name":"sum","type":"long"}]'
)
))
SELECT
TIME_PARSE("timestamp") AS __time,
"len",
"min",
"max",
"sum"
FROM "source"
-- Ordinals reference all five SELECT expressions: rollup-style grouping on every column.
GROUP BY 1, 2, 3, 4, 5
PARTITIONED BY HOUR
Loading