From 1d822d7218d2eadc04da0609874ea9ca17391a74 Mon Sep 17 00:00:00 2001 From: Aravindan Vijayan Date: Thu, 16 Apr 2020 20:10:35 -0700 Subject: [PATCH 1/6] HDDS-3411. Switch Recon SQL DB to Derby. --- .../src/main/resources/ozone-default.xml | 4 +- hadoop-ozone/recon-codegen/pom.xml | 6 +- .../recon/codegen/JooqCodeGenerator.java | 43 +++--- .../ozone/recon/codegen/ReconSqlDbConfig.java | 50 ++++++ .../ozone/recon/codegen/SqlDbUtils.java | 77 ++++++++++ hadoop-ozone/recon/pom.xml | 5 + .../ozone/recon/ReconControllerModule.java | 11 +- .../DefaultDataSourceProvider.java | 25 ++- .../persistence/JooqPersistenceModule.java | 2 +- .../persistence/AbstractReconSqlDBTest.java | 49 +++--- .../TestReconInternalSchemaDefinition.java | 4 +- .../TestReconWithDifferentSqlDBs.java | 142 ++++++++++++++++++ .../TestStatsSchemaDefinition.java | 4 +- .../TestUtilizationSchemaDefinition.java | 10 +- 14 files changed, 368 insertions(+), 64 deletions(-) create mode 100644 hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java create mode 100644 hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java create mode 100644 hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml index c1c0930cb29f..d40da59e6a48 100644 --- a/hadoop-hdds/common/src/main/resources/ozone-default.xml +++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml @@ -2354,7 +2354,7 @@ ozone.recon.sql.db.driver - org.sqlite.JDBC + org.apache.derby.jdbc.EmbeddedDriver OZONE, RECON Database driver class name available on the @@ -2363,7 +2363,7 @@ ozone.recon.sql.db.jdbc.url - jdbc:sqlite:${ozone.recon.db.dir}/ozone_recon_sqlite.db + jdbc:derby:${ozone.recon.db.dir}/ozone_recon_derby.db OZONE, RECON Ozone Recon SQL database jdbc url. 
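The two property changes above only swap the shipped defaults from SQLite to embedded Derby; the driver/JDBC-URL pair remains overridable per deployment. A minimal sketch (hypothetical class name, not part of this patch) of building an equivalent configuration programmatically, using only the keys and values shown above and the OzoneConfiguration API already used elsewhere in this series:

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;

    /** Sketch only (hypothetical class): mirror the new Derby defaults in code. */
    public final class ReconDerbyDefaultsSketch {
      private ReconDerbyDefaultsSketch() { }

      public static OzoneConfiguration withDerbyDefaults(String reconDbDir) {
        OzoneConfiguration conf = new OzoneConfiguration();
        // Same driver and JDBC URL as the new ozone-default.xml defaults above.
        conf.set("ozone.recon.sql.db.driver",
            "org.apache.derby.jdbc.EmbeddedDriver");
        conf.set("ozone.recon.sql.db.jdbc.url",
            "jdbc:derby:" + reconDbDir + "/ozone_recon_derby.db");
        return conf;
      }
    }
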
diff --git a/hadoop-ozone/recon-codegen/pom.xml b/hadoop-ozone/recon-codegen/pom.xml index c26825ab5570..050a70a520a0 100644 --- a/hadoop-ozone/recon-codegen/pom.xml +++ b/hadoop-ozone/recon-codegen/pom.xml @@ -29,9 +29,9 @@ hadoop-ozone-common - org.xerial - sqlite-jdbc - 3.25.2 + org.apache.derby + derby + 10.14.2.0 com.google.inject.extensions diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java index ad9b819c1941..47653a994a83 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java @@ -17,13 +17,19 @@ */ package org.hadoop.ozone.recon.codegen; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.DERBY_DRIVER_CLASS; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.createNewDerbyDatabase; + import java.io.File; +import java.nio.file.Paths; import java.sql.SQLException; import java.util.Set; import javax.sql.DataSource; import org.apache.commons.io.FileUtils; +import org.apache.derby.jdbc.EmbeddedDataSource; +import org.apache.hadoop.util.Time; import org.hadoop.ozone.recon.schema.ReconSchemaDefinition; import org.jooq.codegen.GenerationTool; import org.jooq.meta.jaxb.Configuration; @@ -35,7 +41,6 @@ import org.jooq.meta.jaxb.Target; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.sqlite.SQLiteDataSource; import com.google.inject.AbstractModule; import com.google.inject.Guice; @@ -55,10 +60,11 @@ public class JooqCodeGenerator { private static final Logger LOG = LoggerFactory.getLogger(JooqCodeGenerator.class); - private static final String SQLITE_DB = - System.getProperty("java.io.tmpdir") + "/recon-generated-schema"; - private static final String JDBC_URL = "jdbc:sqlite:" + SQLITE_DB; - + private static final String DB = Paths.get( + System.getProperty("java.io.tmpdir"), + "recon-generated-schema-" + Time.monotonicNow()).toString(); + public static final String RECON_SCHEMA_NAME = "RECON"; + private static final String JDBC_URL = "jdbc:derby:" + DB; private final Set allDefinitions; @Inject @@ -82,20 +88,18 @@ private void generateSourceCode(String outputDir) throws Exception { Configuration configuration = new Configuration() .withJdbc(new Jdbc() - .withDriver("org.sqlite.JDBC") + .withDriver(DERBY_DRIVER_CLASS) .withUrl(JDBC_URL) - .withUser("sa") - .withPassword("sa")) + .withSchema(RECON_SCHEMA_NAME)) .withGenerator(new Generator() .withDatabase(new Database() - .withName("org.jooq.meta.sqlite.SQLiteDatabase") + .withName("org.jooq.meta.derby.DerbyDatabase") .withOutputSchemaToDefault(true) .withIncludeTables(true) .withIncludePrimaryKeys(true)) .withGenerate(new Generate() .withDaos(true) - .withEmptyCatalogs(true) - .withEmptySchemas(true)) + .withEmptyCatalogs(true)) .withStrategy(new Strategy().withName( "org.hadoop.ozone.recon.codegen.TableNamingStrategy")) .withTarget(new Target() @@ -109,20 +113,25 @@ private void generateSourceCode(String outputDir) throws Exception { * Provider for embedded datasource. 
*/ static class LocalDataSourceProvider implements Provider { - private static SQLiteDataSource db; - + private static EmbeddedDataSource dataSource; static { - db = new SQLiteDataSource(); - db.setUrl(JDBC_URL); + try { + createNewDerbyDatabase(JDBC_URL, RECON_SCHEMA_NAME); + } catch (Exception e) { + LOG.error("Error creating Recon Derby DB.", e); + } + dataSource = new EmbeddedDataSource(); + dataSource.setDatabaseName(DB); + dataSource.setUser(RECON_SCHEMA_NAME); } @Override public DataSource get() { - return db; + return dataSource; } static void cleanup() { - FileUtils.deleteQuietly(new File(SQLITE_DB)); + FileUtils.deleteQuietly(new File(DB)); } } diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java new file mode 100644 index 000000000000..93dc10c4f933 --- /dev/null +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.hadoop.ozone.recon.codegen; + +import org.apache.hadoop.hdds.conf.Config; +import org.apache.hadoop.hdds.conf.ConfigGroup; +import org.apache.hadoop.hdds.conf.ConfigTag; +import org.apache.hadoop.hdds.conf.ConfigType; + +/** + * The configuration class for the Recon SQL DB. + */ +@ConfigGroup(prefix = "ozone.recon.sql.db") +public class ReconSqlDbConfig { + + @Config(key = "jooq.dialect", + type = ConfigType.STRING, + defaultValue = "", + tags = { ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE }, + description = "Recon internally uses Jooq to talk to its SQL DB. By " + + "default, we support Derby and Sqlite out of the box. Please refer " + + "to https://www.jooq.org/javadoc/latest/org" + + ".jooq/org/jooq/SQLDialect.html to specify different dialect." + ) + private String sqlDbDialect; + + public String getSqlDbDialect() { + return sqlDbDialect; + } + + public void setSqlDbDialect(String sqlDbDialect) { + this.sqlDbDialect = sqlDbDialect; + } +} diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java new file mode 100644 index 000000000000..86f80974ae8b --- /dev/null +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.hadoop.ozone.recon.codegen; + +import java.io.IOException; +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Constants and Helper functions for Recon SQL related stuff. + */ +public final class SqlDbUtils { + + public final static String DERBY_DRIVER_CLASS = + "org.apache.derby.jdbc.EmbeddedDriver"; + public final static String SQLITE_DRIVER_CLASS = "org.sqlite.JDBC"; + public final static String DERBY_DISABLE_LOG_METHOD = + SqlDbUtils.class.getName() + ".disableDerbyLogFile"; + + private static final Logger LOG = + LoggerFactory.getLogger(SqlDbUtils.class); + + private SqlDbUtils() { + } + + /** + * Create new Derby Database with URL and schema name. + * @param jdbcUrl JDBC url. + * @param schemaName Schema name + * @throws ClassNotFoundException on not finding driver class. + * @throws SQLException on SQL exception. + */ + public static void createNewDerbyDatabase(String jdbcUrl, String schemaName) + throws ClassNotFoundException, SQLException { + System.setProperty("derby.stream.error.method", + DERBY_DISABLE_LOG_METHOD); + Class.forName(DERBY_DRIVER_CLASS); + try(Connection connection = DriverManager.getConnection(jdbcUrl + + ";user=" + schemaName + + ";create=true")) { + LOG.info("Created derby database at {}.", jdbcUrl); + } + } + + /** + * Used to suppress embedded derby database logging. + * @return No-Op output stream. 
+ */ + public static OutputStream disableDerbyLogFile(){ + return new OutputStream() { + public void write(int b) throws IOException { + // Ignore all log messages + } + }; + } +} diff --git a/hadoop-ozone/recon/pom.xml b/hadoop-ozone/recon/pom.xml index 01d7ecefcd86..422d686bc600 100644 --- a/hadoop-ozone/recon/pom.xml +++ b/hadoop-ozone/recon/pom.xml @@ -331,6 +331,11 @@ bonecp 0.8.0.RELEASE + + org.apache.derby + derby + 10.14.2.0 + org.xerial sqlite-jdbc diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java index 36e4cbd0331f..acab0554d35d 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java @@ -30,11 +30,13 @@ import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_CONNECTION_AGE; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.DERBY_DRIVER_CLASS; import java.io.IOException; import java.lang.reflect.Constructor; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager; @@ -62,6 +64,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.ratis.protocol.ClientId; +import org.hadoop.ozone.recon.codegen.ReconSqlDbConfig; import org.hadoop.ozone.recon.schema.tables.daos.ClusterGrowthDailyDao; import org.hadoop.ozone.recon.schema.tables.daos.ContainerHistoryDao; import org.hadoop.ozone.recon.schema.tables.daos.FileCountBySizeDao; @@ -191,7 +194,7 @@ DataSourceConfiguration getDataSourceConfiguration( @Override public String getDriverClass() { return ozoneConfiguration.get(OZONE_RECON_SQL_DB_DRIVER, - "org.sqlite.JDBC"); + DERBY_DRIVER_CLASS); } @Override @@ -223,7 +226,11 @@ public long getConnectionTimeout() { @Override public String getSqlDialect() { - return JooqPersistenceModule.DEFAULT_DIALECT.toString(); + ReconSqlDbConfig sqlDbConfig = + ozoneConfiguration.getObject(ReconSqlDbConfig.class); + return StringUtils.isNotEmpty(sqlDbConfig.getSqlDbDialect()) ? 
+ sqlDbConfig.getSqlDbDialect(): + JooqPersistenceModule.DEFAULT_DIALECT.toString(); } @Override diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java index b0b88470036c..2546b298cac6 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java @@ -17,9 +17,15 @@ */ package org.apache.hadoop.ozone.recon.persistence; +import static org.hadoop.ozone.recon.codegen.JooqCodeGenerator.RECON_SCHEMA_NAME; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.createNewDerbyDatabase; + import javax.sql.DataSource; import org.apache.commons.lang3.StringUtils; +import org.apache.derby.jdbc.EmbeddedDataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.sqlite.SQLiteDataSource; import com.google.inject.Inject; @@ -31,6 +37,9 @@ */ public class DefaultDataSourceProvider implements Provider { + private static final Logger LOG = + LoggerFactory.getLogger(DefaultDataSourceProvider.class); + @Inject private DataSourceConfiguration configuration; @@ -43,14 +52,26 @@ public class DefaultDataSourceProvider implements Provider { */ @Override public DataSource get() { - if (StringUtils.contains(configuration.getJdbcUrl(), "sqlite")) { + String jdbcUrl = configuration.getJdbcUrl(); + LOG.info("JDBC Url for Recon : {} ", jdbcUrl); + if (StringUtils.contains(jdbcUrl, "derby")) { + EmbeddedDataSource dataSource = null; + try { + createNewDerbyDatabase(jdbcUrl, RECON_SCHEMA_NAME); + } catch (Exception e) { + LOG.error("Error creating Recon Derby DB.", e); + } + dataSource = new EmbeddedDataSource(); + dataSource.setDatabaseName(jdbcUrl.split(":")[2]); + dataSource.setUser(RECON_SCHEMA_NAME); + return dataSource; + } else if (StringUtils.contains(jdbcUrl, "sqlite")) { SQLiteDataSource ds = new SQLiteDataSource(); ds.setUrl(configuration.getJdbcUrl()); return ds; } BoneCPDataSource cpDataSource = new BoneCPDataSource(); - cpDataSource.setDriverClass(configuration.getDriverClass()); cpDataSource.setJdbcUrl(configuration.getJdbcUrl()); cpDataSource.setUsername(configuration.getUserName()); diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java index f7ab4a5b6fe6..a28cdf25e96b 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/JooqPersistenceModule.java @@ -45,7 +45,7 @@ public class JooqPersistenceModule extends AbstractModule { private Provider configurationProvider; - public static final SQLDialect DEFAULT_DIALECT = SQLDialect.SQLITE; + public static final SQLDialect DEFAULT_DIALECT = SQLDialect.DERBY; public JooqPersistenceModule( Provider configurationProvider) { diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java index ec6610c19149..14d047d2b975 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java +++ 
b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.ozone.recon.persistence; -import static org.apache.hadoop.ozone.recon.ReconControllerModule.ReconDaoBindingModule.RECON_DAO_LIST; -import static org.junit.Assert.assertNotNull; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.DERBY_DRIVER_CLASS; import java.io.File; import java.io.IOException; @@ -40,7 +39,6 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Rule; -import org.junit.Test; import org.junit.rules.TemporaryFolder; import com.google.inject.AbstractModule; @@ -60,10 +58,22 @@ public class AbstractReconSqlDBTest { private Injector injector; private DSLContext dslContext; + private Provider configurationProvider; public AbstractReconSqlDBTest() { try { temporaryFolder.create(); + configurationProvider = + new DerbyDataSourceConfigurationProvider(temporaryFolder.newFolder()); + } catch (IOException e) { + Assert.fail(); + } + } + + protected AbstractReconSqlDBTest(Provider provider) { + try { + temporaryFolder.create(); + configurationProvider = provider; } catch (IOException e) { Assert.fail(); } @@ -80,12 +90,9 @@ public void createReconSchemaForTest() throws IOException { /** * Get set of Guice modules needed to setup a Recon SQL DB. * @return List of modules. - * @throws IOException on Error. */ - public List getReconSqlDBModules() throws IOException { + public List getReconSqlDBModules() { List modules = new ArrayList<>(); - DataSourceConfigurationProvider configurationProvider = - new DataSourceConfigurationProvider(temporaryFolder.newFolder()); modules.add(new JooqPersistenceModule(configurationProvider)); modules.add(new AbstractModule() { @Override @@ -146,30 +153,16 @@ protected T getSchemaDefinition(Class type) { return injector.getInstance(type); } - /** - * Make sure schema was created correctly. - * @throws SQLException - */ - @Test - public void testSchemaSetup() throws SQLException { - assertNotNull(injector); - assertNotNull(getConfiguration()); - assertNotNull(dslContext); - assertNotNull(getConnection()); - RECON_DAO_LIST.forEach(dao -> { - assertNotNull(getDao(dao)); - }); - } /** - * Local Sqlite datasource provider. + * Local Derby datasource provider. 
*/ - public static class DataSourceConfigurationProvider implements + public static class DerbyDataSourceConfigurationProvider implements Provider { private final File tempDir; - public DataSourceConfigurationProvider(File tempDir) { + public DerbyDataSourceConfigurationProvider(File tempDir) { this.tempDir = tempDir; } @@ -178,13 +171,13 @@ public DataSourceConfiguration get() { return new DataSourceConfiguration() { @Override public String getDriverClass() { - return "org.sqlite.JDBC"; + return DERBY_DRIVER_CLASS; } @Override public String getJdbcUrl() { - return "jdbc:sqlite:" + tempDir.getAbsolutePath() + - File.separator + "sqlite_recon.db"; + return "jdbc:derby:" + tempDir.getAbsolutePath() + + File.separator + "derby_recon.db"; } @Override @@ -209,7 +202,7 @@ public long getConnectionTimeout() { @Override public String getSqlDialect() { - return SQLDialect.SQLITE.toString(); + return SQLDialect.DERBY.toString(); } @Override diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java index a771edd3b58e..befd1edb0ebe 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconInternalSchemaDefinition.java @@ -52,9 +52,9 @@ public void testSchemaCreated() throws Exception { expectedPairs.add(new ImmutablePair<>("task_name", Types.VARCHAR)); expectedPairs.add(new ImmutablePair<>("last_updated_timestamp", - Types.INTEGER)); + Types.BIGINT)); expectedPairs.add(new ImmutablePair<>("last_updated_seq_number", - Types.INTEGER)); + Types.BIGINT)); List> actualPairs = new ArrayList<>(); diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java new file mode 100644 index 000000000000..a82fe7eba77c --- /dev/null +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java @@ -0,0 +1,142 @@ +package org.apache.hadoop.ozone.recon.persistence; + +import static java.util.stream.Collectors.toList; +import static org.apache.hadoop.ozone.recon.ReconControllerModule.ReconDaoBindingModule.RECON_DAO_LIST; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.SQLITE_DRIVER_CLASS; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.File; +import java.io.IOException; +import java.sql.SQLException; +import java.util.stream.Stream; + +import org.hadoop.ozone.recon.schema.tables.daos.ReconTaskStatusDao; +import org.hadoop.ozone.recon.schema.tables.pojos.ReconTaskStatus; +import org.jooq.SQLDialect; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import com.google.inject.Provider; + +/** + * Test Recon schema with different DBs. 
+ */ +@RunWith(Parameterized.class) +public class TestReconWithDifferentSqlDBs extends AbstractReconSqlDBTest { + + public TestReconWithDifferentSqlDBs( + Provider provider) { + super(provider); + } + + @Parameterized.Parameters(name = "{0}") + public static Iterable parameters() throws IOException { + TemporaryFolder temporaryFolder = new TemporaryFolder(); + temporaryFolder.create(); + return Stream.of( + new DerbyDataSourceConfigurationProvider(temporaryFolder.newFolder()), + new SqliteDataSourceConfigurationProvider(temporaryFolder.newFolder())) + .map(each -> new Object[] {each}) + .collect(toList()); + } + + /** + * Make sure schema was created correctly. + * @throws SQLException + */ + @Test + public void testSchemaSetup() throws SQLException { + assertNotNull(getInjector()); + assertNotNull(getConfiguration()); + assertNotNull(getDslContext()); + assertNotNull(getConnection()); + RECON_DAO_LIST.forEach(dao -> { + assertNotNull(getDao(dao)); + }); + ReconTaskStatusDao dao = getDao(ReconTaskStatusDao.class); + dao.insert(new ReconTaskStatus("TestTask", 1L, 2L)); + assertEquals(1, dao.findAll().size()); + } + + /** + * Local Sqlite datasource provider. + */ + public static class SqliteDataSourceConfigurationProvider implements + Provider { + + private final File tempDir; + + public SqliteDataSourceConfigurationProvider(File tempDir) { + this.tempDir = tempDir; + } + + @Override + public DataSourceConfiguration get() { + return new DataSourceConfiguration() { + @Override + public String getDriverClass() { + return SQLITE_DRIVER_CLASS; + } + + @Override + public String getJdbcUrl() { + return "jdbc:sqlite:" + tempDir.getAbsolutePath() + + File.separator + "recon_sqlite.db"; + } + + @Override + public String getUserName() { + return null; + } + + @Override + public String getPassword() { + return null; + } + + @Override + public boolean setAutoCommit() { + return true; + } + + @Override + public long getConnectionTimeout() { + return 10000; + } + + @Override + public String getSqlDialect() { + return SQLDialect.SQLITE.toString(); + } + + @Override + public Integer getMaxActiveConnections() { + return 2; + } + + @Override + public Integer getMaxConnectionAge() { + return 120; + } + + @Override + public Integer getMaxIdleConnectionAge() { + return 120; + } + + @Override + public String getConnectionTestStatement() { + return "SELECT 1"; + } + + @Override + public Integer getIdleConnectionTestPeriod() { + return 30; + } + }; + } + } +} diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java index bb82119df09f..af08383dabbf 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestStatsSchemaDefinition.java @@ -50,9 +50,9 @@ public void testIfStatsSchemaCreated() throws Exception { List> expectedPairs = new ArrayList<>(); expectedPairs.add(new ImmutablePair<>("key", Types.VARCHAR)); - expectedPairs.add(new ImmutablePair<>("value", Types.INTEGER)); + expectedPairs.add(new ImmutablePair<>("value", Types.BIGINT)); expectedPairs.add(new ImmutablePair<>("last_updated_timestamp", - Types.VARCHAR)); + Types.TIMESTAMP)); List> actualPairs = new ArrayList<>(); diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java 
b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java index ea2d08cab0a4..9e781da03be1 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestUtilizationSchemaDefinition.java @@ -58,12 +58,12 @@ public void testReconSchemaCreated() throws Exception { List> expectedPairs = new ArrayList<>(); - expectedPairs.add(new ImmutablePair<>("timestamp", Types.VARCHAR)); + expectedPairs.add(new ImmutablePair<>("timestamp", Types.TIMESTAMP)); expectedPairs.add(new ImmutablePair<>("datanode_id", Types.INTEGER)); expectedPairs.add(new ImmutablePair<>("datanode_host", Types.VARCHAR)); expectedPairs.add(new ImmutablePair<>("rack_id", Types.VARCHAR)); - expectedPairs.add(new ImmutablePair<>("available_size", Types.INTEGER)); - expectedPairs.add(new ImmutablePair<>("used_size", Types.INTEGER)); + expectedPairs.add(new ImmutablePair<>("available_size", Types.BIGINT)); + expectedPairs.add(new ImmutablePair<>("used_size", Types.BIGINT)); expectedPairs.add(new ImmutablePair<>("container_count", Types.INTEGER)); expectedPairs.add(new ImmutablePair<>("block_count", Types.INTEGER)); @@ -82,9 +82,9 @@ public void testReconSchemaCreated() throws Exception { List> expectedPairsFileCount = new ArrayList<>(); expectedPairsFileCount.add( - new ImmutablePair<>("file_size", Types.INTEGER)); + new ImmutablePair<>("file_size", Types.BIGINT)); expectedPairsFileCount.add( - new ImmutablePair<>("count", Types.INTEGER)); + new ImmutablePair<>("count", Types.BIGINT)); List> actualPairsFileCount = new ArrayList<>(); while(resultSetFileCount.next()) { From 17f56a7ec8a5dc68ac8e48ebaa0a016e7c8e6f6c Mon Sep 17 00:00:00 2001 From: Aravindan Vijayan Date: Wed, 22 Apr 2020 17:53:59 -0700 Subject: [PATCH 2/6] Fix integration test failure. 
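
The hunks below point MiniOzoneClusterImpl at a jdbc:derby URL instead of jdbc:sqlite, move the Recon table names to upper case (Derby folds unquoted identifiers to upper case), and make schema creation idempotent via a table-exists probe. A minimal sketch (hypothetical class, using only the Derby EmbeddedDataSource and DriverManager calls that appear in the new DerbyDataSourceProvider and SqlDbUtils code) of the create-and-connect pattern behind this:

    import java.sql.DriverManager;
    import java.sql.SQLException;

    import javax.sql.DataSource;

    import org.apache.derby.jdbc.EmbeddedDataSource;

    /** Sketch only (hypothetical class): stand up an embedded Derby DB. */
    public final class DerbyDataSourceSketch {
      private DerbyDataSourceSketch() { }

      public static DataSource embeddedDerby(String dbPath, String schemaUser)
          throws ClassNotFoundException, SQLException {
        // ";create=true" creates the database directory on first use.
        Class.forName("org.apache.derby.jdbc.EmbeddedDriver");
        DriverManager.getConnection(
            "jdbc:derby:" + dbPath + ";user=" + schemaUser + ";create=true").close();

        EmbeddedDataSource ds = new EmbeddedDataSource();
        ds.setDatabaseName(dbPath);  // the path portion of the jdbc:derby: URL
        ds.setUser(schemaUser);      // Derby uses the user name as the default schema
        return ds;
      }
    }
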
--- .../hadoop/ozone/MiniOzoneClusterImpl.java | 4 +- .../ozone/recon/codegen/SqlDbUtils.java | 20 +++ .../schema/ContainerSchemaDefinition.java | 14 +- .../schema/ReconTaskSchemaDefinition.java | 8 +- .../recon/schema/StatsSchemaDefinition.java | 8 +- .../schema/UtilizationSchemaDefinition.java | 21 ++- hadoop-ozone/recon/pom.xml | 154 +++++++++--------- .../DefaultDataSourceProvider.java | 26 +-- .../persistence/DerbyDataSourceProvider.java | 61 +++++++ .../persistence/SqliteDataSourceProvider.java | 53 ++++++ 10 files changed, 253 insertions(+), 116 deletions(-) create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java index c3d16631486c..50201b81f90e 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java @@ -804,8 +804,8 @@ protected void configureRecon() throws IOException { .getAbsolutePath()); conf.set(OZONE_RECON_SCM_DB_DIR, tempNewFolder.getAbsolutePath()); - conf.set(OZONE_RECON_SQL_DB_JDBC_URL, "jdbc:sqlite:" + - tempNewFolder.getAbsolutePath() + "/ozone_recon_sqlite.db"); + conf.set(OZONE_RECON_SQL_DB_JDBC_URL, "jdbc:derby:" + + tempNewFolder.getAbsolutePath() + "/ozone_recon_derby.db"); conf.set(OZONE_RECON_HTTP_ADDRESS_KEY, "0.0.0.0:0"); conf.set(OZONE_RECON_DATANODE_ADDRESS_KEY, "0.0.0.0:0"); diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java index 86f80974ae8b..cb4f7a70d03f 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java @@ -18,12 +18,17 @@ package org.hadoop.ozone.recon.codegen; +import static org.jooq.impl.DSL.count; + import java.io.IOException; import java.io.OutputStream; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; +import java.util.function.BiPredicate; +import org.jooq.exception.DataAccessException; +import org.jooq.impl.DSL; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -74,4 +79,19 @@ public void write(int b) throws IOException { } }; } + + /** + * Helper function to check if table exists through JOOQ. 
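+ * A DataAccessException from the probe query is treated as the table not
+ * existing yet (returns false); a successful query returns true so the
+ * caller skips table creation.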
+ */ + public static final BiPredicate TABLE_EXISTS_CHECK = + (conn, tableName) -> { + try { + DSL.using(conn).select(count()).from(tableName).execute(); + } catch (DataAccessException ex) { + LOG.info(ex.getMessage()); + return false; + } + LOG.info("{} table already exists, skipping creation.", tableName); + return true; + }; } diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java index 243cb2443fee..ed60094b58ad 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ContainerSchemaDefinition.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import com.google.inject.Inject; import com.google.inject.Singleton; import org.jooq.DSLContext; @@ -35,9 +37,9 @@ public class ContainerSchemaDefinition implements ReconSchemaDefinition { public static final String CONTAINER_HISTORY_TABLE_NAME = - "container_history"; + "CONTAINER_HISTORY"; public static final String MISSING_CONTAINERS_TABLE_NAME = - "missing_containers"; + "MISSING_CONTAINERS"; private static final String CONTAINER_ID = "container_id"; private final DataSource dataSource; private DSLContext dslContext; @@ -51,8 +53,12 @@ public class ContainerSchemaDefinition implements ReconSchemaDefinition { public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); dslContext = DSL.using(conn); - createContainerHistoryTable(); - createMissingContainersTable(); + if (!TABLE_EXISTS_CHECK.test(conn, CONTAINER_HISTORY_TABLE_NAME)) { + createContainerHistoryTable(); + } + if (!TABLE_EXISTS_CHECK.test(conn, MISSING_CONTAINERS_TABLE_NAME)) { + createMissingContainersTable(); + } } /** diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java index eec3cd5d134b..9b9f36aa4075 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import java.sql.Connection; import java.sql.SQLException; @@ -37,7 +39,7 @@ public class ReconTaskSchemaDefinition implements ReconSchemaDefinition { public static final String RECON_TASK_STATUS_TABLE_NAME = - "recon_task_status"; + "RECON_TASK_STATUS"; private final DataSource dataSource; @Inject @@ -48,7 +50,9 @@ public class ReconTaskSchemaDefinition implements ReconSchemaDefinition { @Override public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); - createReconTaskStatus(conn); + if (!TABLE_EXISTS_CHECK.test(conn, RECON_TASK_STATUS_TABLE_NAME)) { + createReconTaskStatus(conn); + } } /** diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java index 406585dc07d3..adfaca626d33 100644 --- 
a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/StatsSchemaDefinition.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import com.google.inject.Inject; import com.google.inject.Singleton; import org.jooq.impl.DSL; @@ -33,7 +35,7 @@ @Singleton public class StatsSchemaDefinition implements ReconSchemaDefinition { - public static final String GLOBAL_STATS_TABLE_NAME = "global_stats"; + public static final String GLOBAL_STATS_TABLE_NAME = "GLOBAL_STATS"; private final DataSource dataSource; @Inject @@ -44,7 +46,9 @@ public class StatsSchemaDefinition implements ReconSchemaDefinition { @Override public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); - createGlobalStatsTable(conn); + if (!TABLE_EXISTS_CHECK.test(conn, GLOBAL_STATS_TABLE_NAME)) { + createGlobalStatsTable(conn); + } } /** diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java index 95df8f736e86..b8cf7ee31093 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java @@ -17,14 +17,19 @@ */ package org.hadoop.ozone.recon.schema; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.TABLE_EXISTS_CHECK; + import java.sql.Connection; import java.sql.SQLException; import javax.sql.DataSource; import com.google.inject.Singleton; + import org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.transaction.annotation.Transactional; import com.google.inject.Inject; @@ -35,13 +40,15 @@ @Singleton public class UtilizationSchemaDefinition implements ReconSchemaDefinition { + private static final Logger LOG = + LoggerFactory.getLogger(UtilizationSchemaDefinition.class); + private final DataSource dataSource; public static final String CLUSTER_GROWTH_DAILY_TABLE_NAME = - "cluster_growth_daily"; - + "CLUSTER_GROWTH_DAILY"; public static final String FILE_COUNT_BY_SIZE_TABLE_NAME = - "file_count_by_size"; + "FILE_COUNT_BY_SIZE"; @Inject UtilizationSchemaDefinition(DataSource dataSource) { @@ -52,8 +59,12 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition { @Transactional public void initializeSchema() throws SQLException { Connection conn = dataSource.getConnection(); - createClusterGrowthTable(conn); - createFileSizeCount(conn); + if (!TABLE_EXISTS_CHECK.test(conn, FILE_COUNT_BY_SIZE_TABLE_NAME)) { + createFileSizeCount(conn); + } + if (!TABLE_EXISTS_CHECK.test(conn, CLUSTER_GROWTH_DAILY_TABLE_NAME)) { + createClusterGrowthTable(conn); + } } private void createClusterGrowthTable(Connection conn) { diff --git a/hadoop-ozone/recon/pom.xml b/hadoop-ozone/recon/pom.xml index 422d686bc600..c93c13819023 100644 --- a/hadoop-ozone/recon/pom.xml +++ b/hadoop-ozone/recon/pom.xml @@ -84,83 +84,83 @@ 2. to install dependencies with yarn install 3. 
building the frontend application --> - - com.github.eirslett - frontend-maven-plugin - 1.6 - - target - ${basedir}/src/main/resources/webapps/recon/ozone-recon-web - - - - Install node and yarn locally to the project - - install-node-and-yarn - - - v12.1.0 - v1.9.2 - - - - yarn install - - yarn - - - install - - - - Build frontend - - yarn - - - run build - - - - - - org.apache.maven.plugins - maven-resources-plugin - - - Copy frontend build to target - process-resources - - copy-resources - - - ${project.build.outputDirectory}/webapps/recon - - - ${basedir}/src/main/resources/webapps/recon/ozone-recon-web/build - true - - - - - - Copy frontend static files to target - process-resources - - copy-resources - - - ${project.build.outputDirectory}/webapps/static - - - ${basedir}/src/main/resources/webapps/recon/ozone-recon-web/build/static - true - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java index 2546b298cac6..42cde7d149d5 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DefaultDataSourceProvider.java @@ -17,16 +17,9 @@ */ package org.apache.hadoop.ozone.recon.persistence; -import static org.hadoop.ozone.recon.codegen.JooqCodeGenerator.RECON_SCHEMA_NAME; -import static org.hadoop.ozone.recon.codegen.SqlDbUtils.createNewDerbyDatabase; - import javax.sql.DataSource; import org.apache.commons.lang3.StringUtils; -import org.apache.derby.jdbc.EmbeddedDataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.sqlite.SQLiteDataSource; import com.google.inject.Inject; import com.google.inject.Provider; @@ -37,9 +30,6 @@ */ public class DefaultDataSourceProvider implements Provider { - private static final Logger LOG = - LoggerFactory.getLogger(DefaultDataSourceProvider.class); - @Inject private DataSourceConfiguration configuration; @@ -53,22 +43,10 @@ public class DefaultDataSourceProvider implements Provider { @Override public DataSource get() { String jdbcUrl = configuration.getJdbcUrl(); - LOG.info("JDBC Url for Recon : {} ", jdbcUrl); if (StringUtils.contains(jdbcUrl, "derby")) { - EmbeddedDataSource dataSource = null; - try { - createNewDerbyDatabase(jdbcUrl, RECON_SCHEMA_NAME); - } catch (Exception e) { - LOG.error("Error creating Recon Derby DB.", e); - } - dataSource = new EmbeddedDataSource(); - dataSource.setDatabaseName(jdbcUrl.split(":")[2]); - dataSource.setUser(RECON_SCHEMA_NAME); - return dataSource; + return new DerbyDataSourceProvider(configuration).get(); } else if (StringUtils.contains(jdbcUrl, "sqlite")) { - SQLiteDataSource ds = new SQLiteDataSource(); - ds.setUrl(configuration.getJdbcUrl()); - return ds; + return new SqliteDataSourceProvider(configuration).get(); } BoneCPDataSource cpDataSource = new BoneCPDataSource(); diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java new file mode 100644 index 000000000000..51678c011675 --- /dev/null +++ 
b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DerbyDataSourceProvider.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.ozone.recon.persistence; + +import static org.hadoop.ozone.recon.codegen.JooqCodeGenerator.RECON_SCHEMA_NAME; +import static org.hadoop.ozone.recon.codegen.SqlDbUtils.createNewDerbyDatabase; + +import javax.sql.DataSource; + +import org.apache.derby.jdbc.EmbeddedDataSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.inject.Inject; +import com.google.inject.Provider; + +/** + * Provide a {@link javax.sql.DataSource} for the application. + */ +public class DerbyDataSourceProvider implements Provider { + + private static final Logger LOG = + LoggerFactory.getLogger(DerbyDataSourceProvider.class); + + private DataSourceConfiguration configuration; + + @Inject + DerbyDataSourceProvider(DataSourceConfiguration configuration) { + this.configuration = configuration; + } + + @Override + public DataSource get() { + String jdbcUrl = configuration.getJdbcUrl(); + LOG.info("JDBC Url for Recon : {} ", jdbcUrl); + try { + createNewDerbyDatabase(jdbcUrl, RECON_SCHEMA_NAME); + } catch (Exception e) { + LOG.error("Error creating Recon Derby DB.", e); + } + EmbeddedDataSource dataSource = new EmbeddedDataSource(); + dataSource.setDatabaseName(jdbcUrl.split(":")[2]); + dataSource.setUser(RECON_SCHEMA_NAME); + return dataSource; + } +} diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java new file mode 100644 index 000000000000..897f8be8c7fa --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/SqliteDataSourceProvider.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.recon.persistence; + +import javax.sql.DataSource; + +import org.sqlite.SQLiteDataSource; + +import com.google.inject.Inject; +import com.google.inject.Provider; + +/** + * Provide a {@link javax.sql.DataSource} for the application. + */ +public class SqliteDataSourceProvider implements Provider { + + private DataSourceConfiguration configuration; + + @Inject + public SqliteDataSourceProvider(DataSourceConfiguration configuration) { + this.configuration = configuration; + } + + /** + * Create a pooled datasource for the application. + *

+ * Default sqlite database does not work with a connection pool, actually + * most embedded databases do not, hence returning native implementation for + * default db. + */ + @Override + public DataSource get() { + SQLiteDataSource ds = new SQLiteDataSource(); + ds.setUrl(configuration.getJdbcUrl()); + return ds; + } +} \ No newline at end of file From 83a85a6c96160c56ef7b0962f9f4acf4fc131c74 Mon Sep 17 00:00:00 2001 From: Aravindan Vijayan Date: Wed, 22 Apr 2020 17:59:32 -0700 Subject: [PATCH 3/6] Rat check. --- hadoop-ozone/recon/pom.xml | 154 +++++++++--------- .../TestReconWithDifferentSqlDBs.java | 17 ++ 2 files changed, 94 insertions(+), 77 deletions(-) diff --git a/hadoop-ozone/recon/pom.xml b/hadoop-ozone/recon/pom.xml index c93c13819023..422d686bc600 100644 --- a/hadoop-ozone/recon/pom.xml +++ b/hadoop-ozone/recon/pom.xml @@ -84,83 +84,83 @@ 2. to install dependencies with yarn install 3. building the frontend application --> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + com.github.eirslett + frontend-maven-plugin + 1.6 + + target + ${basedir}/src/main/resources/webapps/recon/ozone-recon-web + + + + Install node and yarn locally to the project + + install-node-and-yarn + + + v12.1.0 + v1.9.2 + + + + yarn install + + yarn + + + install + + + + Build frontend + + yarn + + + run build + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + Copy frontend build to target + process-resources + + copy-resources + + + ${project.build.outputDirectory}/webapps/recon + + + ${basedir}/src/main/resources/webapps/recon/ozone-recon-web/build + true + + + + + + Copy frontend static files to target + process-resources + + copy-resources + + + ${project.build.outputDirectory}/webapps/static + + + ${basedir}/src/main/resources/webapps/recon/ozone-recon-web/build/static + true + + + + + + diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java index a82fe7eba77c..3a93a0dcdc8b 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java @@ -1,3 +1,20 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + package org.apache.hadoop.ozone.recon.persistence; import static java.util.stream.Collectors.toList; From e22f0905691b3b79d860ea8ae112a352372fd516 Mon Sep 17 00:00:00 2001 From: Aravindan Vijayan Date: Mon, 27 Apr 2020 12:32:20 -0700 Subject: [PATCH 4/6] Address review comments, move SQL DB configs to Java based. --- .../src/main/resources/ozone-default.xml | 95 --------- .../recon/codegen/JooqCodeGenerator.java | 10 +- .../ozone/recon/codegen/ReconSqlDbConfig.java | 185 +++++++++++++++++- .../ozone/recon/codegen/SqlDbUtils.java | 2 +- .../schema/ReconTaskSchemaDefinition.java | 4 +- .../schema/UtilizationSchemaDefinition.java | 4 +- .../ozone/recon/ReconControllerModule.java | 58 ++---- .../ozone/recon/ReconServerConfigKeys.java | 20 -- .../persistence/DataSourceConfiguration.java | 6 +- .../persistence/AbstractReconSqlDBTest.java | 6 +- .../TestReconWithDifferentSqlDBs.java | 6 +- 11 files changed, 221 insertions(+), 175 deletions(-) diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml index fef2192be105..ed8cecd903f9 100644 --- a/hadoop-hdds/common/src/main/resources/ozone-default.xml +++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml @@ -2371,101 +2371,6 @@ If enabled, tracing information is sent to tracing server. - - ozone.recon.sql.db.driver - org.apache.derby.jdbc.EmbeddedDriver - OZONE, RECON - - Database driver class name available on the - Ozone Recon classpath. - - - - ozone.recon.sql.db.jdbc.url - jdbc:derby:${ozone.recon.db.dir}/ozone_recon_derby.db - OZONE, RECON - - Ozone Recon SQL database jdbc url. - - - - ozone.recon.sql.db.username - - OZONE, RECON - - Ozone Recon SQL database username. - - - - ozone.recon.sql.db.password - - OZONE, RECON - - Ozone Recon database password. - - - - ozone.recon.sql.db.auto.commit - false - OZONE, RECON - - Sets the Ozone Recon database connection property of auto-commit to - true/false. - - - - ozone.recon.sql.db.conn.timeout - 30000 - OZONE, RECON - - Sets time in milliseconds before call to getConnection is timed out. - - - - ozone.recon.sql.db.conn.max.active - 1 - OZONE, RECON - - The max active connections to the SQL database. The default SQLite - database only allows single active connection, set this to a - reasonable value like 10, for external production database. - - - - ozone.recon.sql.db.conn.max.age - 1800 - OZONE, RECON - - Sets maximum time a connection can be active in seconds. - - - - ozone.recon.sql.db.conn.idle.max.age - 3600 - OZONE, RECON - - Sets maximum time to live for idle connection in seconds. - - - - ozone.recon.sql.db.conn.idle.test.period - 60 - OZONE, RECON - - This sets the time (in seconds), for a connection to remain idle before - sending a test query to the DB. This is useful to prevent a DB from - timing out connections on its end. - - - - ozone.recon.sql.db.conn.idle.test - SELECT 1 - OZONE, RECON - - The query to send to the DB to maintain keep-alives and test for dead - connections. 
- - ozone.recon.task.thread.count 1 diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java index 47653a994a83..246f03910c96 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/JooqCodeGenerator.java @@ -37,6 +37,7 @@ import org.jooq.meta.jaxb.Generate; import org.jooq.meta.jaxb.Generator; import org.jooq.meta.jaxb.Jdbc; +import org.jooq.meta.jaxb.Logging; import org.jooq.meta.jaxb.Strategy; import org.jooq.meta.jaxb.Target; import org.slf4j.Logger; @@ -89,14 +90,14 @@ private void generateSourceCode(String outputDir) throws Exception { new Configuration() .withJdbc(new Jdbc() .withDriver(DERBY_DRIVER_CLASS) - .withUrl(JDBC_URL) - .withSchema(RECON_SCHEMA_NAME)) + .withUrl(JDBC_URL)) .withGenerator(new Generator() .withDatabase(new Database() .withName("org.jooq.meta.derby.DerbyDatabase") .withOutputSchemaToDefault(true) .withIncludeTables(true) - .withIncludePrimaryKeys(true)) + .withIncludePrimaryKeys(true) + .withInputSchema(RECON_SCHEMA_NAME)) .withGenerate(new Generate() .withDaos(true) .withEmptyCatalogs(true)) @@ -105,7 +106,8 @@ private void generateSourceCode(String outputDir) throws Exception { .withTarget(new Target() .withPackageName("org.hadoop.ozone.recon.schema") .withClean(true) - .withDirectory(outputDir))); + .withDirectory(outputDir))) + .withLogging(Logging.WARN); GenerationTool.generate(configuration); } diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java index 93dc10c4f933..23300321c878 100644 --- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java +++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java @@ -18,6 +18,8 @@ package org.hadoop.ozone.recon.codegen; +import static java.util.concurrent.TimeUnit.SECONDS; + import org.apache.hadoop.hdds.conf.Config; import org.apache.hadoop.hdds.conf.ConfigGroup; import org.apache.hadoop.hdds.conf.ConfigTag; @@ -29,10 +31,189 @@ @ConfigGroup(prefix = "ozone.recon.sql.db") public class ReconSqlDbConfig { - @Config(key = "jooq.dialect", + @Config(key = "driver", + type = ConfigType.STRING, + defaultValue = "org.apache.derby.jdbc.EmbeddedDriver", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Recon SQL DB driver class. Defaults to Derby." + ) + private String driverClass; + + public String getDriverClass() { + return driverClass; + } + + public void setDriverClass(String driverClass) { + this.driverClass = driverClass; + } + + @Config(key = "jdbc.url", + type = ConfigType.STRING, + defaultValue = "jdbc:derby:${ozone.recon.db.dir}/ozone_recon_derby.db", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Ozone Recon SQL database jdbc url." + ) + private String jdbcUrl; + + public String getJdbcUrl() { + return jdbcUrl; + } + + public void setJdbcUrl(String jdbcUrl) { + this.jdbcUrl = jdbcUrl; + } + + @Config(key = "username", + type = ConfigType.STRING, + defaultValue = "", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Ozone Recon SQL database username." 
+ ) + private String username; + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + @Config(key = "password", type = ConfigType.STRING, defaultValue = "", - tags = { ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE }, + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Ozone Recon SQL database password." + ) + private String password; + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + @Config(key = "auto.commit", + type = ConfigType.BOOLEAN, + defaultValue = "false", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets the Ozone Recon database connection property of " + + "auto-commit to true/false." + ) + private boolean autoCommit; + + public boolean isAutoCommit() { + return autoCommit; + } + + public void setAutoCommit(boolean autoCommit) { + this.autoCommit = autoCommit; + } + + @Config(key = "conn.timeout", + type = ConfigType.TIME, + defaultValue = "30000ms", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets time in milliseconds before call to getConnection " + + "is timed out." + ) + private long connectionTimeout; + + public long getConnectionTimeout() { + return connectionTimeout; + } + + public void setConnectionTimeout(long connectionTimeout) { + this.connectionTimeout = connectionTimeout; + } + + @Config(key = "conn.max.active", + type = ConfigType.INT, + defaultValue = "5", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "The max active connections to the SQL database." + ) + private int maxActiveConnections; + + public int getMaxActiveConnections() { + return maxActiveConnections; + } + + public void setMaxActiveConnections(int maxActiveConnections) { + this.maxActiveConnections = maxActiveConnections; + } + + @Config(key = "conn.max.age", + type = ConfigType.TIME, timeUnit = SECONDS, + defaultValue = "1800s", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets maximum time a connection can be active in seconds." + ) + private long connectionMaxAge; + + public long getConnectionMaxAge() { + return connectionMaxAge; + } + + public void setConnectionMaxAge(long connectionMaxAge) { + this.connectionMaxAge = connectionMaxAge; + } + + @Config(key = "conn.idle.max.age", + type = ConfigType.TIME, timeUnit = SECONDS, + defaultValue = "3600s", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets maximum time to live for idle connection in seconds." + ) + private long connectionIdleMaxAge; + + public long getConnectionIdleMaxAge() { + return connectionIdleMaxAge; + } + + public void setConnectionIdleMaxAge(long connectionIdleMaxAge) { + this.connectionIdleMaxAge = connectionIdleMaxAge; + } + + @Config(key = "conn.idle.test.period", + type = ConfigType.TIME, timeUnit = SECONDS, + defaultValue = "60s", + tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE}, + description = "Sets maximum time to live for idle connection in seconds." 
+  )
+  private long connectionIdleTestPeriod;
+
+  public long getConnectionIdleTestPeriod() {
+    return connectionIdleTestPeriod;
+  }
+
+  public void setConnectionIdleTestPeriod(long connectionIdleTestPeriod) {
+    this.connectionIdleTestPeriod = connectionIdleTestPeriod;
+  }
+
+  @Config(key = "conn.idle.test",
+      type = ConfigType.STRING,
+      defaultValue = "SELECT 1",
+      tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE},
+      description = "The query to send to the DB to maintain keep-alives and " +
+          "test for dead connections."
+  )
+  private String idleTestQuery;
+
+  public String getIdleTestQuery() {
+    return idleTestQuery;
+  }
+
+  public void setIdleTestQuery(String idleTestQuery) {
+    this.idleTestQuery = idleTestQuery;
+  }
+
+  @Config(key = "jooq.dialect",
+      type = ConfigType.STRING,
+      defaultValue = "DERBY",
+      tags = {ConfigTag.STORAGE, ConfigTag.RECON, ConfigTag.OZONE},
       description = "Recon internally uses Jooq to talk to its SQL DB. By " +
           "default, we support Derby and Sqlite out of the box. Please refer " +
           "to https://www.jooq.org/javadoc/latest/org" +
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java
index cb4f7a70d03f..7e68541cf840 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/SqlDbUtils.java
@@ -88,7 +88,7 @@ public void write(int b) throws IOException {
     try {
       DSL.using(conn).select(count()).from(tableName).execute();
     } catch (DataAccessException ex) {
-      LOG.info(ex.getMessage());
+      LOG.debug(ex.getMessage());
       return false;
     }
     LOG.info("{} table already exists, skipping creation.", tableName);
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java
index 9b9f36aa4075..45fc1ba0d73b 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/ReconTaskSchemaDefinition.java
@@ -51,7 +51,7 @@ public class ReconTaskSchemaDefinition implements ReconSchemaDefinition {
   public void initializeSchema() throws SQLException {
     Connection conn = dataSource.getConnection();
     if (!TABLE_EXISTS_CHECK.test(conn, RECON_TASK_STATUS_TABLE_NAME)) {
-      createReconTaskStatus(conn);
+      createReconTaskStatusTable(conn);
     }
   }
 
@@ -59,7 +59,7 @@ public void initializeSchema() throws SQLException {
    * Create the Recon Task Status table.
    * @param conn connection
    */
-  private void createReconTaskStatus(Connection conn) {
+  private void createReconTaskStatusTable(Connection conn) {
     DSL.using(conn).createTableIfNotExists(RECON_TASK_STATUS_TABLE_NAME)
         .column("task_name", SQLDataType.VARCHAR(1024))
         .column("last_updated_timestamp", SQLDataType.BIGINT)
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java
index b8cf7ee31093..941a3c635f01 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/schema/UtilizationSchemaDefinition.java
@@ -60,7 +60,7 @@ public class UtilizationSchemaDefinition implements ReconSchemaDefinition {
   public void initializeSchema() throws SQLException {
     Connection conn = dataSource.getConnection();
     if (!TABLE_EXISTS_CHECK.test(conn, FILE_COUNT_BY_SIZE_TABLE_NAME)) {
-      createFileSizeCount(conn);
+      createFileSizeCountTable(conn);
     }
     if (!TABLE_EXISTS_CHECK.test(conn, CLUSTER_GROWTH_DAILY_TABLE_NAME)) {
       createClusterGrowthTable(conn);
@@ -82,7 +82,7 @@ private void createClusterGrowthTable(Connection conn) {
         .execute();
   }
 
-  private void createFileSizeCount(Connection conn) {
+  private void createFileSizeCountTable(Connection conn) {
     DSL.using(conn).createTableIfNotExists(FILE_COUNT_BY_SIZE_TABLE_NAME)
         .column("file_size", SQLDataType.BIGINT)
         .column("count", SQLDataType.BIGINT)
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java
index acab0554d35d..590997ec543d 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java
@@ -19,24 +19,11 @@
 import static org.apache.hadoop.hdds.scm.cli.ContainerOperationClient.newContainerRpcClient;
 import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_INTERNAL_SERVICE_ID;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_AUTO_COMMIT;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_CONNECTION_TIMEOUT;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_DRIVER;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_PASSWORD;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_USER;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_IDLE_CONNECTION_TEST_PERIOD;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_ACTIVE_CONNECTIONS;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_CONNECTION_AGE;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT;
-import static org.hadoop.ozone.recon.codegen.SqlDbUtils.DERBY_DRIVER_CLASS;
 
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.util.List;
 
-import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
 import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
@@ -190,77 +177,68 @@ StorageContainerLocationProtocol getSCMProtocol(
   DataSourceConfiguration getDataSourceConfiguration(
       final OzoneConfiguration ozoneConfiguration) {
+    ReconSqlDbConfig sqlDbConfig =
+        ozoneConfiguration.getObject(ReconSqlDbConfig.class);
+
     return new DataSourceConfiguration() {
       @Override
       public String getDriverClass() {
-        return ozoneConfiguration.get(OZONE_RECON_SQL_DB_DRIVER,
-            DERBY_DRIVER_CLASS);
+        return sqlDbConfig.getDriverClass();
       }
 
       @Override
       public String getJdbcUrl() {
-        return ozoneConfiguration.get(OZONE_RECON_SQL_DB_JDBC_URL);
+        return sqlDbConfig.getJdbcUrl();
      }
 
       @Override
       public String getUserName() {
-        return ozoneConfiguration.get(OZONE_RECON_SQL_DB_USER);
+        return sqlDbConfig.getUsername();
       }
 
       @Override
       public String getPassword() {
-        return ozoneConfiguration.get(OZONE_RECON_SQL_DB_PASSWORD);
+        return sqlDbConfig.getPassword();
       }
 
       @Override
       public boolean setAutoCommit() {
-        return ozoneConfiguration.getBoolean(
-            OZONE_RECON_SQL_AUTO_COMMIT, false);
+        return sqlDbConfig.isAutoCommit();
       }
 
       @Override
       public long getConnectionTimeout() {
-        return ozoneConfiguration.getLong(
-            OZONE_RECON_SQL_CONNECTION_TIMEOUT, 30000);
+        return sqlDbConfig.getConnectionTimeout();
      }
 
       @Override
       public String getSqlDialect() {
-        ReconSqlDbConfig sqlDbConfig =
-            ozoneConfiguration.getObject(ReconSqlDbConfig.class);
-        return StringUtils.isNotEmpty(sqlDbConfig.getSqlDbDialect()) ?
-            sqlDbConfig.getSqlDbDialect():
-            JooqPersistenceModule.DEFAULT_DIALECT.toString();
+        return sqlDbConfig.getSqlDbDialect();
      }
 
       @Override
       public Integer getMaxActiveConnections() {
-        return ozoneConfiguration.getInt(
-            OZONE_RECON_SQL_MAX_ACTIVE_CONNECTIONS, 10);
+        return sqlDbConfig.getMaxActiveConnections();
      }
 
       @Override
-      public Integer getMaxConnectionAge() {
-        return ozoneConfiguration.getInt(
-            OZONE_RECON_SQL_MAX_CONNECTION_AGE, 1800);
+      public long getMaxConnectionAge() {
+        return sqlDbConfig.getConnectionMaxAge();
      }
 
       @Override
-      public Integer getMaxIdleConnectionAge() {
-        return ozoneConfiguration.getInt(
-            OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE, 3600);
+      public long getMaxIdleConnectionAge() {
+        return sqlDbConfig.getConnectionIdleMaxAge();
      }
 
       @Override
       public String getConnectionTestStatement() {
-        return ozoneConfiguration.get(
-            OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT, "SELECT 1");
+        return sqlDbConfig.getIdleTestQuery();
      }
 
       @Override
-      public Integer getIdleConnectionTestPeriod() {
-        return ozoneConfiguration.getInt(
-            OZONE_RECON_SQL_IDLE_CONNECTION_TEST_PERIOD, 60);
+      public long getIdleConnectionTestPeriod() {
+        return sqlDbConfig.getConnectionIdleTestPeriod();
      }
     };
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
index 9d037c31ca73..e4afd14c6ea3 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
@@ -97,28 +97,8 @@ public final class ReconServerConfigKeys {
       "recon.om.snapshot.task.flush.param";
 
   // Persistence properties
-  public static final String OZONE_RECON_SQL_DB_DRIVER =
-      "ozone.recon.sql.db.driver";
   public static final String OZONE_RECON_SQL_DB_JDBC_URL =
       "ozone.recon.sql.db.jdbc.url";
-  public static final String OZONE_RECON_SQL_DB_USER =
-      "ozone.recon.sql.db.username";
-  public static final String OZONE_RECON_SQL_DB_PASSWORD =
-      "ozone.recon.sql.db.password";
-  public static final String OZONE_RECON_SQL_AUTO_COMMIT =
-      "ozone.recon.sql.db.auto.commit";
-  public static final String OZONE_RECON_SQL_CONNECTION_TIMEOUT =
-      "ozone.recon.sql.db.conn.timeout";
-  public static final String OZONE_RECON_SQL_MAX_ACTIVE_CONNECTIONS =
-      "ozone.recon.sql.db.conn.max.active";
-  public static final String OZONE_RECON_SQL_MAX_CONNECTION_AGE =
-      "ozone.recon.sql.db.conn.max.age";
-  public static final String OZONE_RECON_SQL_MAX_IDLE_CONNECTION_AGE =
-      "ozone.recon.sql.db.conn.idle.max.age";
-  public static final String OZONE_RECON_SQL_IDLE_CONNECTION_TEST_PERIOD =
-      "ozone.recon.sql.db.conn.idle.test.period";
-  public static final String OZONE_RECON_SQL_MAX_IDLE_CONNECTION_TEST_STMT =
-      "ozone.recon.sql.db.conn.idle.test";
 
   public static final String OZONE_RECON_TASK_THREAD_COUNT_KEY =
       "ozone.recon.task.thread.count";
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java
index 54ef88860a69..7e97c4f76e51 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/persistence/DataSourceConfiguration.java
@@ -66,12 +66,12 @@ public interface DataSourceConfiguration {
   /**
    * Sets the maximum connection age (in seconds).
    */
-  Integer getMaxConnectionAge();
+  long getMaxConnectionAge();
 
   /**
    * Sets the maximum idle connection age (in seconds).
    */
-  Integer getMaxIdleConnectionAge();
+  long getMaxIdleConnectionAge();
 
   /**
    * Statement specific to database, usually SELECT 1.
@@ -81,5 +81,5 @@ public interface DataSourceConfiguration {
   /**
    * How often to test idle connections for being active (in seconds).
    */
-  Integer getIdleConnectionTestPeriod();
+  long getIdleConnectionTestPeriod();
 }
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java
index 1cc0f2d45334..664a7321d9a2 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/AbstractReconSqlDBTest.java
@@ -210,12 +210,12 @@ public Integer getMaxActiveConnections() {
      }
 
      @Override
-      public Integer getMaxConnectionAge() {
+      public long getMaxConnectionAge() {
        return 120;
      }
 
      @Override
-      public Integer getMaxIdleConnectionAge() {
+      public long getMaxIdleConnectionAge() {
        return 120;
      }
@@ -225,7 +225,7 @@ public String getConnectionTestStatement() {
      }
 
      @Override
-      public Integer getIdleConnectionTestPeriod() {
+      public long getIdleConnectionTestPeriod() {
        return 30;
      }
    };
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java
index 3a93a0dcdc8b..12b9659cd5fd 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/persistence/TestReconWithDifferentSqlDBs.java
@@ -135,12 +135,12 @@ public Integer getMaxActiveConnections() {
      }
 
      @Override
-      public Integer getMaxConnectionAge() {
+      public long getMaxConnectionAge() {
        return 120;
      }
 
      @Override
-      public Integer getMaxIdleConnectionAge() {
+      public long getMaxIdleConnectionAge() {
        return 120;
      }
@@ -150,7 +150,7 @@ public String getConnectionTestStatement() {
      }
 
      @Override
-      public Integer getIdleConnectionTestPeriod() {
+      public long getIdleConnectionTestPeriod() {
        return 30;
      }
    };

From 523f50b4dc19a8b0da5fd185004811e59f236dab Mon Sep 17 00:00:00 2001
From: Aravindan Vijayan
Date: Mon, 27 Apr 2020 14:48:45 -0700
Subject: [PATCH 5/6] trigger new CI check

From fa85ecd7376a8ca34598a68a52f9c6fb2d3206ef Mon Sep 17 00:00:00 2001
From: Aravindan Vijayan
Date: Mon, 27 Apr 2020 16:04:06 -0700
Subject: [PATCH 6/6] Fix TestOzoneConfigurationFields integration test failure.
---
 .../org/apache/hadoop/ozone/MiniOzoneClusterImpl.java    | 2 +-
 .../org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java | 8 ++++++++
 .../apache/hadoop/ozone/recon/ReconServerConfigKeys.java | 4 ----
 3 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
index c88c4fbd558f..5baa65b43c37 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
@@ -29,7 +29,7 @@
 import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_HTTP_ADDRESS_KEY;
 import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_OM_SNAPSHOT_DB_DIR;
 import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR;
-import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL;
+import static org.hadoop.ozone.recon.codegen.ReconSqlDbConfig.ConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL;
 
 import java.io.File;
 import java.io.IOException;
diff --git a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java
index 23300321c878..704d26bdecce 100644
--- a/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java
+++ b/hadoop-ozone/recon-codegen/src/main/java/org/hadoop/ozone/recon/codegen/ReconSqlDbConfig.java
@@ -228,4 +228,12 @@ public String getSqlDbDialect() {
   public void setSqlDbDialect(String sqlDbDialect) {
     this.sqlDbDialect = sqlDbDialect;
   }
+
+  /**
+   * Class to hold config keys related to Recon SQL DB.
+   */
+  public static class ConfigKeys {
+    public static final String OZONE_RECON_SQL_DB_JDBC_URL =
+        "ozone.recon.sql.db.jdbc.url";
+  }
 }
diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
index e4afd14c6ea3..d2eb8e1654f8 100644
--- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
+++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java
@@ -96,10 +96,6 @@ public final class ReconServerConfigKeys {
   public static final String RECON_OM_SNAPSHOT_TASK_FLUSH_PARAM =
       "recon.om.snapshot.task.flush.param";
 
-  // Persistence properties
-  public static final String OZONE_RECON_SQL_DB_JDBC_URL =
-      "ozone.recon.sql.db.jdbc.url";
-
   public static final String OZONE_RECON_TASK_THREAD_COUNT_KEY =
       "ozone.recon.task.thread.count";
   public static final int OZONE_RECON_TASK_THREAD_COUNT_DEFAULT = 5;
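
A minimal usage sketch of the new typed SQL DB configuration, assuming only the APIs visible in this series: OzoneConfiguration#getObject(ReconSqlDbConfig.class), the generated getters on ReconSqlDbConfig, and the ConfigKeys constant added for TestOzoneConfigurationFields. The class name and the /tmp JDBC URL below are illustrative only and are not part of the patch.

// Illustrative sketch; key constant, getters and defaults come from the
// ReconSqlDbConfig class introduced above.
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.hadoop.ozone.recon.codegen.ReconSqlDbConfig;

public final class ReconSqlDbConfigUsageSketch {
  public static void main(String[] args) {
    OzoneConfiguration conf = new OzoneConfiguration();

    // Optional override; without it the annotated defaults apply
    // (Derby embedded driver, jdbc:derby:${ozone.recon.db.dir}/ozone_recon_derby.db).
    conf.set(ReconSqlDbConfig.ConfigKeys.OZONE_RECON_SQL_DB_JDBC_URL,
        "jdbc:derby:/tmp/recon/ozone_recon_derby.db");

    // Typed view over the ozone.recon.sql.db.* keys, mirroring what
    // ReconControllerModule#getDataSourceConfiguration does in this series.
    ReconSqlDbConfig sqlDbConfig = conf.getObject(ReconSqlDbConfig.class);

    System.out.println(sqlDbConfig.getDriverClass());  // Derby embedded driver
    System.out.println(sqlDbConfig.getJdbcUrl());      // the URL set above
    System.out.println(sqlDbConfig.getSqlDbDialect()); // DERBY
  }
}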