@@ -18,6 +18,7 @@
package org.apache.doris.statistics;

import org.apache.doris.analysis.AnalyzeDBStmt;
import org.apache.doris.analysis.AnalyzeProperties;
import org.apache.doris.analysis.AnalyzeStmt;
import org.apache.doris.analysis.AnalyzeTblStmt;
import org.apache.doris.analysis.DropAnalyzeJobStmt;
@@ -169,6 +170,13 @@ public void createAnalyze(AnalyzeStmt analyzeStmt, boolean proxy) throws DdlException {

public void createAnalysisJobs(AnalyzeDBStmt analyzeDBStmt, boolean proxy) throws DdlException {
DatabaseIf<TableIf> db = analyzeDBStmt.getDb();
List<AnalysisInfo> analysisInfos = buildAnalysisInfosForDB(db, analyzeDBStmt.getAnalyzeProperties());
if (!analyzeDBStmt.isSync()) {
sendJobId(analysisInfos, proxy);
}
}

public List<AnalysisInfo> buildAnalysisInfosForDB(DatabaseIf<TableIf> db, AnalyzeProperties analyzeProperties) {
List<TableIf> tbls = db.getTables();
List<AnalysisInfo> analysisInfos = new ArrayList<>();
db.readLock();
@@ -178,27 +186,30 @@ public void createAnalysisJobs(AnalyzeDBStmt analyzeDBStmt, boolean proxy) throws DdlException {
if (table instanceof View) {
continue;
}
TableName tableName = new TableName(analyzeDBStmt.getCtlIf().getName(), db.getFullName(),
TableName tableName = new TableName(db.getCatalog().getName(), db.getFullName(),
table.getName());
AnalyzeTblStmt analyzeTblStmt = new AnalyzeTblStmt(analyzeDBStmt.getAnalyzeProperties(), tableName,
AnalyzeTblStmt analyzeTblStmt = new AnalyzeTblStmt(analyzeProperties, tableName,
null, db.getId(), table);
try {
analyzeTblStmt.check();
} catch (AnalysisException analysisException) {
throw new DdlException(analysisException.getMessage(), analysisException);
LOG.warn("Failed to build analyze job: {}",
analysisException.getMessage(), analysisException);
}
analyzeStmts.add(analyzeTblStmt);
}
for (AnalyzeTblStmt analyzeTblStmt : analyzeStmts) {
analysisInfos.add(buildAndAssignJob(analyzeTblStmt));
}
if (!analyzeDBStmt.isSync()) {
sendJobId(analysisInfos, proxy);
try {
analysisInfos.add(buildAndAssignJob(analyzeTblStmt));
} catch (DdlException e) {
LOG.warn("Failed to build analyze job: {}",
e.getMessage(), e);
}
}
return analysisInfos;
} finally {
db.readUnlock();
}

}

// Each analyze stmt corresponding to an analysis job.
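The change above moves job construction into buildAnalysisInfosForDB and wraps each per-table step in its own try/catch, so one bad table is logged and skipped instead of failing the whole ANALYZE DATABASE request. A minimal, self-contained sketch of that per-item error-handling pattern (the class and helper names are illustrative, not Doris APIs):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

// Hypothetical stand-ins for AnalyzeTblStmt / AnalysisInfo, for illustration only.
class BatchJobBuilder {
    private static final Logger LOG = Logger.getLogger(BatchJobBuilder.class.getName());

    // Build one job per input; log and skip failures instead of failing the batch.
    static List<String> buildJobs(List<String> tables) {
        List<String> jobs = new ArrayList<>();
        for (String table : tables) {
            try {
                jobs.add(buildJob(table));  // may throw for a single table
            } catch (IllegalStateException e) {
                LOG.warning("Failed to build analyze job for " + table + ": " + e.getMessage());
            }
        }
        return jobs;  // partial results are still returned
    }

    private static String buildJob(String table) {
        if (table.isEmpty()) {
            throw new IllegalStateException("empty table name");
        }
        return "job-" + table;
    }

    public static void main(String[] args) {
        System.out.println(buildJobs(List.of("t1", "", "t2")));  // -> [job-t1, job-t2]
    }
}
```

Partial results plus a warning in the log is the behavior the new buildAnalysisInfosForDB opts into, rather than the old all-or-nothing DdlException.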
@@ -56,14 +56,59 @@ protected void runAfterCatalogReady() {
if (!StatisticsUtil.statsTblAvailable()) {
return;
}
if (!checkAnalyzeTime(LocalTime.now(TimeUtils.getTimeZone().toZoneId()))) {
return;
}

if (!analysisTaskExecutor.idle()) {
return;
}

analyzePeriodically();
if (!Config.enable_full_auto_analyze) {
analyzeAutomatically();
}
}
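runAfterCatalogReady now returns early unless the current time passes checkAnalyzeTime and the analysisTaskExecutor is idle. Below is a hedged sketch of the kind of LocalTime-based window check checkAnalyzeTime presumably performs; the 00:00–02:00 bounds are placeholders, not Doris configuration:

```java
import java.time.LocalTime;

class AnalyzeWindow {
    // Placeholder bounds; the real values would come from configuration.
    static final LocalTime START = LocalTime.of(0, 0);
    static final LocalTime END = LocalTime.of(2, 0);

    // True only when `now` falls inside [START, END); also handles a window
    // that wraps past midnight (e.g. 23:00 -> 02:00).
    static boolean inWindow(LocalTime now) {
        if (START.isBefore(END)) {
            return !now.isBefore(START) && now.isBefore(END);
        }
        return !now.isBefore(START) || now.isBefore(END);
    }

    public static void main(String[] args) {
        System.out.println(inWindow(LocalTime.of(1, 30)));  // true with the placeholder bounds
        System.out.println(inWindow(LocalTime.of(12, 0)));  // false
    }
}
```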

@SuppressWarnings({"rawtypes", "unchecked"})
private void analyzeAll() {
Set<CatalogIf> catalogs = Env.getCurrentEnv().getCatalogMgr().getCopyOfCatalog();
for (CatalogIf ctl : catalogs) {

Collection<DatabaseIf> dbs = ctl.getAllDbs();
for (DatabaseIf<TableIf> databaseIf : dbs) {
if (StatisticConstants.STATISTICS_DB_BLACK_LIST.contains(databaseIf.getFullName())) {
continue;
}
AnalysisManager analysisManager = Env.getCurrentEnv().getAnalysisManager();
List<AnalysisInfo> analysisInfos = analysisManager.buildAnalysisInfosForDB(databaseIf,
AnalyzeProperties.DEFAULT_PROP);
for (AnalysisInfo analysisInfo : analysisInfos) {
analysisInfo = getReAnalyzeRequiredPart(analysisInfo);
if (analysisInfo == null) {
continue;
}
try {
analysisManager.createSystemAnalysisJob(analysisInfo, analysisTaskExecutor);
} catch (Exception e) {
LOG.warn("Failed to create analysis job", e);
}
}
}

}

analyzePeriodically();
}
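analyzeAll walks every catalog and database (skipping the statistics blacklist), builds the default AnalysisInfo list through buildAnalysisInfosForDB, and relies on getReAnalyzeRequiredPart returning null to mean "nothing to re-analyze, skip this job". A toy sketch of that null-as-skip filtering step follows; the predicate is invented purely for illustration:

```java
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

class ReanalyzePlanner {
    // Stand-in for getReAnalyzeRequiredPart: null means the job can be skipped.
    static Integer reanalyzeRequiredPart(Integer jobInfo) {
        return jobInfo % 2 == 0 ? jobInfo : null;  // toy predicate, not real staleness logic
    }

    // analyzeAll-style pass: map every candidate, drop the nulls, submit the rest.
    static List<Integer> plan(List<Integer> candidates) {
        return candidates.stream()
                .map(ReanalyzePlanner::reanalyzeRequiredPart)
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(plan(List.of(1, 2, 3, 4)));  // -> [2, 4]
    }
}
```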

private void analyzePeriodically() {
@@ -116,16 +161,24 @@ private void analyzeAutomatically() {
* @return new job info after check
* @throws Throwable failed to check
*/
private AnalysisInfo getReAnalyzeRequiredPart(AnalysisInfo jobInfo) {
long lastExecTimeInMs = jobInfo.lastExecTimeInMs;
TableIf table = StatisticsUtil
.findTable(jobInfo.catalogName, jobInfo.dbName, jobInfo.tblName);
TableStatistic tblStats = StatisticsRepository.fetchTableLevelStats(table.getId());
TableStatistic tblStats = null;
try {
tblStats = StatisticsRepository.fetchTableLevelStats(table.getId());
} catch (Throwable t) {
LOG.warn("Failed to fetch table stats", t);
return null;
}

if (tblStats == TableStatistic.UNKNOWN) {
LOG.warn("Failed to automatically analyze statistics, "
+ "no corresponding table statistics for job: {}", jobInfo.toString());
throw new DdlException("No corresponding table statistics for automatic job.");
return jobInfo;
}

if (!needReanalyzeTable(table, tblStats)) {
@@ -156,7 +209,7 @@ private boolean needReanalyzeTable(TableIf table, TableStatistic tblStats) {
}

private void checkAnalyzedPartitions(TableIf table, Set<String> statsPartitions,
Set<String> needRunPartitions, long lastExecTimeInMs) throws DdlException {
Set<String> needRunPartitions, long lastExecTimeInMs) {
for (String statsPartition : statsPartitions) {
Partition partition = table.getPartition(statsPartition);
if (partition == null) {
@@ -165,12 +218,17 @@ private void checkAnalyzedPartitions(TableIf table, Set<String> statsPartitions,
needRunPartitions.add(statsPartition);
continue;
}
TableStatistic partitionStats = StatisticsRepository
TableStatistic partitionStats = null;
try {
partitionStats = StatisticsRepository
.fetchTableLevelOfPartStats(partition.getId());
if (partitionStats == TableStatistic.UNKNOWN) {
} catch (DdlException e) {
LOG.warn("Failed to fetch part stats", e);
continue;
}
if (needReanalyzePartition(lastExecTimeInMs, partition, partitionStats)) {

if (needReanalyzePartition(lastExecTimeInMs, partition, partitionStats)
|| partitionStats == TableStatistic.UNKNOWN) {
needRunPartitions.add(partition.getName());
}
}
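checkAnalyzedPartitions now fetches each partition's stats inside its own try/catch: a failed lookup is logged and skipped, while an UNKNOWN result or a stale partition is queued for re-analysis. A small self-contained sketch of that per-partition decision, with a sentinel standing in for TableStatistic.UNKNOWN and a map lookup standing in for the repository call:

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;

class PartitionReanalyzeCheck {
    private static final Logger LOG = Logger.getLogger(PartitionReanalyzeCheck.class.getName());
    static final long UNKNOWN = -1L;  // sentinel playing the role of TableStatistic.UNKNOWN

    // For each partition: a null update time models a failed stats fetch (log and skip);
    // UNKNOWN or an update newer than the last analyze run marks it for re-analysis.
    static Set<String> partitionsToRerun(Map<String, Long> lastUpdateMs, long lastExecTimeInMs) {
        Set<String> rerun = new HashSet<>();
        for (Map.Entry<String, Long> entry : lastUpdateMs.entrySet()) {
            Long updated = entry.getValue();
            if (updated == null) {
                LOG.warning("Failed to fetch stats for partition " + entry.getKey());
                continue;
            }
            if (updated == UNKNOWN || updated > lastExecTimeInMs) {
                rerun.add(entry.getKey());
            }
        }
        return rerun;
    }

    public static void main(String[] args) {
        Map<String, Long> stats = new HashMap<>();
        stats.put("p1", 1_000L);    // older than the last run -> not re-analyzed
        stats.put("p2", UNKNOWN);   // unknown stats -> re-analyzed
        stats.put("p3", null);      // fetch failed -> skipped
        System.out.println(partitionsToRerun(stats, 5_000L));  // -> [p2]
    }
}
```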