This repository was archived by the owner on May 12, 2021. It is now read-only.
Merged
32 commits
98ab7a5
exposing jmx port for monitoring the threads, cpu and memory
RajashekharInmobi Jul 18, 2018
d3b41b3
Fix for RNA-10440: Lens driver not picking hadoop conf
RajashekharInmobi Aug 16, 2018
69e9e64
added the files
RajashekharInmobi Aug 16, 2018
616aeec
Merge remote-tracking branch 'upstream/master'
RajashekharInmobi Aug 16, 2018
3dffffb
Adding support for Partition Columns to queried without giving additi…
RajashekharInmobi Aug 27, 2018
7783450
reverting old change
RajashekharInmobi Aug 27, 2018
149a192
resolved imports
RajashekharInmobi Aug 27, 2018
c7aa785
added Example for supporting the non-timed partition as filter while …
Sep 10, 2018
787c7e2
Merge remote-tracking branch 'upstream/master'
Sep 20, 2018
1dc444c
Merge remote-tracking branch 'upstream/master'
Sep 20, 2018
c0cb51a
fixed the yaml
RajashekharInmobi Sep 20, 2018
65174d2
licence
RajashekharInmobi Sep 20, 2018
dbbf4f5
Merge remote-tracking branch 'upstream/master'
RajashekharInmobi Sep 21, 2018
8e921d5
Merge remote-tracking branch 'upstream/master'
RajashekharInmobi Jun 11, 2019
0519d9d
made few changes to the code for azure deployment
RajashekharInmobi Jul 2, 2019
bc6f628
Fixed the test case, removed instances where Prepare was hitting the …
RajashekharInmobi Jul 3, 2019
2d6fb3d
reversed few changes
RajashekharInmobi Jul 8, 2019
0caecd7
removing not needed commits
RajashekharInmobi Jul 8, 2019
2ce78dc
undid the unit test cases
RajashekharInmobi Jul 9, 2019
2400d8e
made one more change
RajashekharInmobi Jul 9, 2019
5fa2800
fixed code review comments
RajashekharInmobi Jul 15, 2019
926b604
fixed code review comments
RajashekharInmobi Jul 15, 2019
353dd56
fixed code review comments
RajashekharInmobi Jul 15, 2019
b183d21
Merge branch 'master' of https://github.com/RajashekharInmobi/lens
RajashekharInmobi Jul 16, 2019
fc4358e
made some changes
RajashekharInmobi Jul 16, 2019
75c440d
Merge branch 'master' of https://github.com/RajashekharInmobi/lens
RajashekharInmobi Jul 16, 2019
fe57a2e
added the files
RajashekharInmobi Jul 16, 2019
0cdd720
added the code review fix
RajashekharInmobi Jul 19, 2019
dc67c27
added a unit test case
RajashekharInmobi Jul 22, 2019
8d3e015
added the test case
RajashekharInmobi Jul 22, 2019
af1f5d4
fixed unit test case
RajashekharInmobi Jul 22, 2019
24783d6
fixed unit test case
RajashekharInmobi Jul 22, 2019
@@ -1830,30 +1830,19 @@ public XFact getXFactTable(FactTable ft) throws LensException {
for (UpdatePeriod updatePeriod : updatePeriods) {
tableNames.add(updatePeriodToTableMap.get(updatePeriod));
}
if (tableNames.size() <= 1) {
XStorageTableElement tblElement = JAXBUtils.getXStorageTableFromHiveTable(
getHiveTable(MetastoreUtil.getFactOrDimtableStorageTableName(cft.getName(), storageName)));
tblElement.setStorageName(storageName);
for (UpdatePeriod p : updatePeriods) {
tblElement.getUpdatePeriods().getUpdatePeriod().add(XUpdatePeriod.valueOf(p.name()));
}
factTable.getStorageTables().getStorageTable().add(tblElement);
} else {
// Multiple storage tables.
XStorageTableElement tblElement = new XStorageTableElement();
tblElement.setStorageName(storageName);
XUpdatePeriods xUpdatePeriods = new XUpdatePeriods();
tblElement.setUpdatePeriods(xUpdatePeriods);
for (Map.Entry entry : updatePeriodToTableMap.entrySet()) {
XUpdatePeriodTableDescriptor updatePeriodTableDescriptor = new XUpdatePeriodTableDescriptor();
updatePeriodTableDescriptor.setTableDesc(getStorageTableDescFromHiveTable(
XStorageTableElement tblElement = new XStorageTableElement();
Review thread (Contributor): Can a unit test be added for the bug fixed here?
Reply: added it.
Reply: done.
tblElement.setStorageName(storageName);
XUpdatePeriods xUpdatePeriods = new XUpdatePeriods();
tblElement.setUpdatePeriods(xUpdatePeriods);
for (Map.Entry entry : updatePeriodToTableMap.entrySet()) {
XUpdatePeriodTableDescriptor updatePeriodTableDescriptor = new XUpdatePeriodTableDescriptor();
updatePeriodTableDescriptor.setTableDesc(getStorageTableDescFromHiveTable(
this.getHiveTable(MetastoreUtil.getFactOrDimtableStorageTableName(cft.getName(),
(String) entry.getValue()))));
updatePeriodTableDescriptor.setUpdatePeriod(XUpdatePeriod.valueOf(((UpdatePeriod) entry.getKey()).name()));
xUpdatePeriods.getUpdatePeriodTableDescriptor().add(updatePeriodTableDescriptor);
}
factTable.getStorageTables().getStorageTable().add(tblElement);
(String) entry.getValue()))));
updatePeriodTableDescriptor.setUpdatePeriod(XUpdatePeriod.valueOf(((UpdatePeriod) entry.getKey()).name()));
xUpdatePeriods.getUpdatePeriodTableDescriptor().add(updatePeriodTableDescriptor);
}
factTable.getStorageTables().getStorageTable().add(tblElement);
}
fact = factTable;
}
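The fix makes getXFactTable take the multi-table path unconditionally: every update period gets its own XUpdatePeriodTableDescriptor instead of a single storage table element guarded by tableNames.size(). Below is a minimal, self-contained sketch of that per-period mapping; the plain map of strings and the name concatenation are simplified stand-ins for XUpdatePeriodTableDescriptor and MetastoreUtil.getFactOrDimtableStorageTableName, not the real API.

import java.util.LinkedHashMap;
import java.util.Map;

public class UpdatePeriodTableMappingSketch {

    /** Builds one "descriptor" per update period, mirroring the unified loop in getXFactTable. */
    static Map<String, String> describeStorageTables(String factName,
                                                     Map<String, String> updatePeriodToTable) {
        Map<String, String> descriptors = new LinkedHashMap<String, String>();
        for (Map.Entry<String, String> entry : updatePeriodToTable.entrySet()) {
            // Placeholder for MetastoreUtil.getFactOrDimtableStorageTableName(factName, tableName).
            String physicalTable = (factName + "_" + entry.getValue()).toLowerCase();
            descriptors.put(entry.getKey(), physicalTable);
        }
        return descriptors;
    }

    public static void main(String[] args) {
        Map<String, String> updatePeriodToTable = new LinkedHashMap<String, String>();
        updatePeriodToTable.put("HOURLY", "hourly_c1");
        updatePeriodToTable.put("DAILY", "daily_c1");
        System.out.println(describeStorageTables("testMetastoreXFact", updatePeriodToTable));
    }
}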
@@ -30,6 +30,7 @@
import java.text.SimpleDateFormat;
import java.util.*;

import org.apache.lens.api.metastore.XFact;
import org.apache.lens.cube.error.LensCubeErrorCode;
import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
import org.apache.lens.cube.metadata.ReferencedDimAttribute.ChainRefCol;
@@ -47,6 +48,7 @@
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -89,6 +91,7 @@ public class TestCubeMetastoreClient {
private static final String CUBE_NAME_WITH_PROPS = "testMetastoreCubeWithProps";
private static final String DERIVED_CUBE_NAME = "derivedTestMetastoreCube";
private static final String DERIVED_CUBE_NAME_WITH_PROPS = "derivedTestMetastoreCubeWithProps";
private static final String X_FACT_NAME = "testMetastoreXFact";
private static final Map<String, String> CUBE_PROPERTIES = new HashMap<>();
private static HiveConf conf = new HiveConf(TestCubeMetastoreClient.class);
private static FieldSchema dtPart = new FieldSchema(getDatePartitionKey(), serdeConstants.STRING_TYPE_NAME,
@@ -144,6 +147,11 @@ public static void setup() throws HiveException, AlreadyExistsException, LensExc
conf.set(LensConfConstants.AUTHORIZER_CLASS, "org.apache.lens.cube.parse.MockAuthorizer");
LensAuthorizer.get().init(conf);

try {
Hive.get().dropDatabase(TestCubeMetastoreClient.class.getSimpleName(), true, true, true);
} catch (NoSuchObjectException e) {
fail();
}
Database database = new Database();
database.setName(TestCubeMetastoreClient.class.getSimpleName());
Hive.get(conf).createDatabase(database);
@@ -1591,6 +1599,48 @@ public void testCubeFactWithTwoTimedParts() throws Exception {
assertFalse(client.latestPartitionExists(cubeFact.getName(), c1, testDtPart.getName()));
}

@Test(priority = 2)
public void testGetXFactTable() throws Exception {
List<FieldSchema> factColumns = new ArrayList<>(cubeMeasures.size());
for (CubeMeasure measure : cubeMeasures) {
factColumns.add(measure.getColumn());
}

// add one dimension of the cube
factColumns.add(new FieldSchema("zipcode", "int", "zip"));
FieldSchema itPart = new FieldSchema("it", "string", "date part");
FieldSchema etPart = new FieldSchema("et", "string", "date part");
StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
Lists.newArrayList(getDatePartition(), itPart, etPart), Lists.newArrayList(getDatePartitionKey(),
itPart.getName(), etPart.getName()));

Set<UpdatePeriod> c1Storage = new HashSet<UpdatePeriod>();
c1Storage.add(HOURLY);
Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
updatePeriods.put(c1, c1Storage);

Map<String, StorageTableDesc> storageTableDescs = getHashMap("HOURLY_c1", s1);

Map<UpdatePeriod, String> updatePeriodMap = new HashMap<>();
updatePeriodMap.put(HOURLY, HOURLY.toString() + "_" + c1);

Map<String, Set<String>> storageTablePartitionColumns = new HashMap<String, Set<String>>();
storageTablePartitionColumns.put(c1, new HashSet<>());

Map<String, Map<UpdatePeriod, String>> storageUpdatePeriodMap = new HashMap<String, Map<UpdatePeriod, String>>();

storageUpdatePeriodMap.put(c1, updatePeriodMap);

client.createCubeFactTable(CUBE_NAME_WITH_PROPS, X_FACT_NAME, factColumns, updatePeriods, 0.0d, new HashMap<String,
String>(), storageTableDescs, storageUpdatePeriodMap, storageTablePartitionColumns);

CubeFactTable cubeFact = new CubeFactTable(CUBE_NAME_WITH_PROPS, X_FACT_NAME, factColumns, updatePeriods, 0.0d,
new HashMap<String, String>(), storageUpdatePeriodMap, new HashMap<String, Set<String>>());
XFact xfact = client.getXFactTable(cubeFact);
assertEquals(xfact.getCubeName(), CUBE_NAME_WITH_PROPS);
assertEquals(xfact.getName(), X_FACT_NAME.toLowerCase());
}

@Test(priority = 2)
public void testCubeFactWithThreeTimedParts() throws Exception {
String factName = "testMetastoreFact3TimedParts";
@@ -593,19 +593,25 @@ public DriverQueryPlan explain(AbstractQueryContext explainCtx) throws LensExcep
+ explainKeyword + " ");
}
log.info("{} Explain Query : {}", getFullyQualifiedName(), explainQuery);
QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery, null,
new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false);
QueryResult result = null;
try {
result = executeInternal(explainQueryCtx, explainQuery);
if (result.error != null) {
throw new LensException("Query explain failed!", result.error);
}
} finally {
if (result != null) {
result.close();

boolean validateThroughPrepare = explainCtx.getDriverConf(this).getBoolean(JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN,
DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN);
if (validateThroughPrepare) {
QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery, null, new LensConf(),
explainConf, this, explainCtx.getLensSessionIdentifier(), false);
QueryResult result = null;
try {
result = executeInternal(explainQueryCtx, explainQuery);
if (result.error != null) {
throw new LensException("Query explain failed!", result.error);
}
} finally {
if (result != null) {
result.close();
}
}
}

JDBCQueryPlan jqp = new JDBCQueryPlan(calculateQueryCost(explainCtx));
explainCtx.getDriverContext().setDriverQueryPlan(this, jqp);
return jqp;
@@ -621,8 +627,8 @@ public void validate(AbstractQueryContext pContext) throws LensException {
if (pContext.getDriverQuery(this) == null) {
throw new NullPointerException("Null driver query for " + pContext.getUserQuery());
}
boolean validateThroughPrepare = pContext.getDriverConf(this).getBoolean(JDBC_VALIDATE_THROUGH_PREPARE,
DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE);
boolean validateThroughPrepare = pContext.getDriverConf(this).getBoolean(JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN,
DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN);
if (validateThroughPrepare) {
PreparedStatement stmt;
// Estimate queries need to get connection from estimate pool to make sure
@@ -94,11 +94,12 @@ public enum ConnectionPoolProperties {
/** The Constant DEFAULT_JDBC_EXPLAIN_KEYWORD_BEFORE_SELECT. */
public static final boolean DEFAULT_JDBC_EXPLAIN_KEYWORD_BEFORE_SELECT = true;

/** The Constant JDBC_VALIDATE_THROUGH_PREPARE. */
public static final String JDBC_VALIDATE_THROUGH_PREPARE = JDBC_DRIVER_PFX + "validate.through.prepare";
/** The Constant JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN. */
public static final String JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN =
JDBC_DRIVER_PFX + "validate.through.prepare.or.explain";

/** The Constant DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE. */
public static final boolean DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE = true;
public static final boolean DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN = true;

/** The Constant JDBC_VALIDATE_SKIP_WARNINGS */
public static final String JDBC_VALIDATE_SKIP_WARNINGS = JDBC_DRIVER_PFX + "validate.skip.warnings";
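For illustration, a deployment that does not want explain or validate to go through a live prepare can turn the renamed flag off. This is a sketch assuming JDBC_DRIVER_PFX resolves to "lens.driver.jdbc." (the full key is an assumption; the default remains true):

import org.apache.hadoop.conf.Configuration;

public class ValidateThroughPrepareConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Assumed full key: JDBC_DRIVER_PFX + "validate.through.prepare.or.explain".
        conf.setBoolean("lens.driver.jdbc.validate.through.prepare.or.explain", false);

        boolean validateThroughPrepare =
            conf.getBoolean("lens.driver.jdbc.validate.through.prepare.or.explain", true);
        // With the flag off, explain() skips executeInternal and validate() skips the prepared statement.
        System.out.println("validate through prepare/explain: " + validateThroughPrepare);
    }
}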
@@ -49,10 +49,17 @@ public class PreparedQueryContext extends AbstractQueryContext implements Delaye
private final QueryPrepareHandle prepareHandle;

/**
* The prepared time.
* The prepare start time.
*/
@Getter
private final Date preparedTime;
private final Date prepareStartTime;

/**
* The prepare end time.
*/
@Getter
@Setter
private Date prepareEndTime = null;

/**
* The prepared user.
@@ -94,7 +101,7 @@ public PreparedQueryContext(String query, String user, Configuration conf, Colle
public PreparedQueryContext(String query, String user, Configuration conf, LensConf qconf, Collection<LensDriver>
drivers) {
super(query, user, qconf, conf, drivers, true);
this.preparedTime = new Date();
this.prepareStartTime = new Date();
this.preparedUser = user;
this.prepareHandle = new QueryPrepareHandle(UUID.randomUUID());
this.conf = conf;
@@ -119,9 +126,9 @@ public int compareTo(Delayed o) {
@Override
public long getDelay(TimeUnit units) {
long delayMillis;
if (this.preparedTime != null) {
if (this.prepareStartTime != null) {
Date now = new Date();
long elapsedMills = now.getTime() - this.preparedTime.getTime();
long elapsedMills = now.getTime() - this.prepareStartTime.getTime();
delayMillis = millisInWeek - elapsedMills;
return units.convert(delayMillis, TimeUnit.MILLISECONDS);
} else {
@@ -147,7 +154,7 @@ public void updateConf(Map<String, String> confoverlay) {
* @return the lens prepared query
*/
public LensPreparedQuery toPreparedQuery() {
return new LensPreparedQuery(prepareHandle, userQuery, preparedTime, preparedUser, getDriverContext()
return new LensPreparedQuery(prepareHandle, userQuery, prepareStartTime, preparedUser, getDriverContext()
.getSelectedDriver() != null ? getDriverContext().getSelectedDriver().getFullyQualifiedName() : null,
getDriverContext().getSelectedDriverQuery(), lensConf);
}
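The getDelay change above only renames the field it reads; the purger semantics are unchanged. A minimal stand-alone sketch of that pattern, with a simplified stand-in for PreparedQueryContext sitting in the server's DelayQueue (millisInWeek mirrors the constant the real class uses):

import java.util.Date;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;

public class PreparedQueryDelaySketch {
    private static final long MILLIS_IN_WEEK = 7L * 24 * 60 * 60 * 1000;

    /** Simplified stand-in for PreparedQueryContext: eligible for purge a week after prepare started. */
    static class PreparedEntry implements Delayed {
        private final Date prepareStartTime = new Date();

        @Override
        public long getDelay(TimeUnit units) {
            long elapsedMillis = System.currentTimeMillis() - prepareStartTime.getTime();
            return units.convert(MILLIS_IN_WEEK - elapsedMillis, TimeUnit.MILLISECONDS);
        }

        @Override
        public int compareTo(Delayed other) {
            return Long.compare(getDelay(TimeUnit.MILLISECONDS), other.getDelay(TimeUnit.MILLISECONDS));
        }
    }

    public static void main(String[] args) {
        DelayQueue<PreparedEntry> queue = new DelayQueue<PreparedEntry>();
        queue.add(new PreparedEntry());
        // The purger thread calls take(), which blocks until the week has elapsed; peek() does not block.
        System.out.println("remaining delay (ms): " + queue.peek().getDelay(TimeUnit.MILLISECONDS));
    }
}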
@@ -35,6 +35,7 @@
import org.apache.lens.api.query.QueryStatus;
import org.apache.lens.server.api.error.LensException;
import org.apache.lens.server.api.query.FinishedLensQuery;
import org.apache.lens.server.api.query.PreparedQueryContext;
import org.apache.lens.server.api.query.QueryContext;
import org.apache.lens.server.session.LensSessionImpl;
import org.apache.lens.server.util.UtilityMethods;
@@ -47,6 +48,7 @@
import org.apache.hadoop.conf.Configuration;

import com.google.common.collect.Lists;

import lombok.extern.slf4j.Slf4j;

/**
@@ -105,6 +107,20 @@ public void createFinishedQueriesTable() throws Exception {
log.warn("Unable to create finished queries table", e);
}
}

public void createPreparedQueriesTable() throws Exception {
String sql = "CREATE TABLE if not exists prepared_queries (handle varchar(255) NOT NULL unique, userquery "
+ "varchar(20000), submitter varchar(255) NOT NULL, timetaken bigint, queryname varchar(255) DEFAULT NULL, "
+ "drivername varchar(10000) DEFAULT NULL, driverquery varchar(1000000), starttime bigint)";
try {
QueryRunner runner = new QueryRunner(ds);
runner.update(sql);
log.info("Created prepared_queries queries table");
} catch (SQLException e) {
log.warn("Unable to create prepared_queries queries table", e);
}
}

public void createFailedAttemptsTable() throws Exception {
String sql = "CREATE TABLE if not exists failed_attempts (handle varchar(255) not null,"
+ "attempt_number int, drivername varchar(10000), progress float, progressmessage varchar(10000), "
@@ -821,4 +837,35 @@ public boolean deleteActiveSession(LensSessionHandle sessionId) throws LensExcep

return result;
}

Review thread (Contributor): add update prepare query also
Reply: We don't update the prepared query, we only insert prepared queries, so it is not needed.
/**
* DAO method to insert a new Prepared query into Table.
*
* @param preparedQueryContext to be inserted
* @throws SQLException the exception
*/
public void insertPreparedQuery(PreparedQueryContext preparedQueryContext) throws LensException {
String sql = "insert into prepared_queries (handle, userquery, submitter, timetaken, queryname, drivername, "
+ "driverquery, starttime)" + " values (?,?,?,?,?,?,?,?)";
Connection conn = null;
try {
conn = getConnection();
conn.setAutoCommit(false);
QueryRunner runner = new QueryRunner();

long timeTaken =
preparedQueryContext.getPrepareEndTime().getTime() - preparedQueryContext.getPrepareStartTime().getTime();

runner.update(conn, sql, preparedQueryContext.getPrepareHandle().getQueryHandleString(),
preparedQueryContext.getUserQuery(), preparedQueryContext.getSubmittedUser(), timeTaken,
preparedQueryContext.getQueryName(), preparedQueryContext.getDriverContext().getSelectedDriver().toString(),
preparedQueryContext.getSelectedDriverQuery(), preparedQueryContext.getPrepareStartTime().getTime());
conn.commit();
} catch (SQLException e) {
log.error("Failed to insert prepared query into database with error, " + e);
throw new LensException(e);
} finally {
DbUtils.closeQuietly(conn);
}
}
}
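Once the server has created and populated the table, it can be inspected with plain JDBC. A sketch against the columns defined in the DDL above; the connection URL is only a placeholder for whatever database backs LensServerDAO:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class PreparedQueriesReportSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL; point it at the server's query store.
        String url = "jdbc:hsqldb:mem:lensserver";
        String sql = "SELECT submitter, COUNT(*) AS prepares, AVG(timetaken) AS avg_ms "
            + "FROM prepared_queries GROUP BY submitter";
        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(sql)) {
            while (rs.next()) {
                System.out.printf("%s: %d prepares, avg %.1f ms%n",
                    rs.getString("submitter"), rs.getLong("prepares"), rs.getDouble("avg_ms"));
            }
        }
    }
}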
@@ -147,7 +147,9 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
/**
* The Constant PREPARED_QUERY_PURGER_COUNTER.
*/
public static final String PREPARED_QUERY_PURGER_COUNTER = "prepared-query-purger-errors";
public static final String PREPARED_QUERY_PURGER_ERROR_COUNTER = "prepared-query-purger-errors";

public static final String PREPARED_QUERY_INSERT_ERROR_COUNTER = "prepared-query-insert-errors";

/**
* The millis in week.
@@ -1302,13 +1304,13 @@ public void run() {
destroyPreparedQuery(prepared);
log.info("Purged prepared query: {}", prepared.getPrepareHandle());
} catch (LensException e) {
incrCounter(PREPARED_QUERY_PURGER_COUNTER);
incrCounter(PREPARED_QUERY_PURGER_ERROR_COUNTER);
log.error("Error closing prepared query ", e);
} catch (InterruptedException e) {
log.info("PreparedQueryPurger has been interrupted, exiting");
return;
} catch (Exception e) {
incrCounter(PREPARED_QUERY_PURGER_COUNTER);
incrCounter(PREPARED_QUERY_PURGER_ERROR_COUNTER);
log.error("Error in prepared query purger", e);
}
}
@@ -1415,6 +1417,7 @@ private void initalizeFinishedQueryStore(Configuration conf) {
this.lensServerDao.createFailedAttemptsTable();
this.lensServerDao.createActiveSessionsTable();
this.lensServerDao.createActiveQueriesTable();
this.lensServerDao.createPreparedQueriesTable();
} catch (Exception e) {
log.warn("Unable to create finished query tables, query purger will not purge queries", e);
}
@@ -2054,9 +2057,10 @@ public QueryPrepareHandle prepare(LensSessionHandle sessionHandle, String query,
acquire(sessionHandle);
prepared = prepareQuery(sessionHandle, query, lensConf, SubmitOp.PREPARE);
prepared.setQueryName(queryName);
prepared.getSelectedDriver().prepare(prepared);
lensServerDao.insertPreparedQuery(prepared);
return prepared.getPrepareHandle();
} catch (LensException e) {
incrCounter(PREPARED_QUERY_INSERT_ERROR_COUNTER);
if (prepared != null) {
destroyPreparedQuery(prepared);
}
@@ -2087,6 +2091,7 @@ private PreparedQueryContext prepareQuery(LensSessionHandle sessionHandle, Strin
preparedQueries.put(prepared.getPrepareHandle(), prepared);
preparedQueryQueue.add(prepared);
incrCounter(PREPARED_QUERIES_COUNTER);
prepared.setPrepareEndTime(new Date());
return prepared;
}

@@ -3031,7 +3036,7 @@ public List<QueryPrepareHandle> getAllPreparedQueries(LensSessionHandle sessionH
continue;
}
}
long queryPrepTime = preparedQueryContext.getPreparedTime().getTime();
long queryPrepTime = preparedQueryContext.getPrepareStartTime().getTime();
if (fromTime <= queryPrepTime && queryPrepTime < toTime) {
continue;
}
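Note the ordering these hunks rely on: prepareQuery() stamps prepareEndTime just before it returns, and only then does prepare() call insertPreparedQuery, which needs both timestamps to compute timetaken. A minimal sketch of that invariant in plain Java, with field names mirroring PreparedQueryContext:

import java.util.Date;

public class PrepareTimingSketch {
    public static void main(String[] args) throws InterruptedException {
        Date prepareStartTime = new Date();   // set in the PreparedQueryContext constructor
        Thread.sleep(25);                     // stand-in for query rewriting and driver selection
        Date prepareEndTime = new Date();     // set at the end of prepareQuery()

        // insertPreparedQuery would throw a NullPointerException if prepareEndTime were still null here.
        long timeTaken = prepareEndTime.getTime() - prepareStartTime.getTime();
        System.out.println("timetaken persisted to prepared_queries: " + timeTaken + " ms");
    }
}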