Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,13 @@
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
import org.reflections.Reflections;

import com.google.common.annotations.VisibleForTesting;

import avro.shaded.com.google.common.collect.Sets;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should not use the shaded version.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fixed.


/**
* Manages the DDL command analyzers.
*/
Expand All @@ -49,11 +52,15 @@ private DDLSemanticAnalyzerFactory() {
int type();
}

private static final String DDL_ROOT = "org.apache.hadoop.hive.ql.ddl";
private static final Map<Integer, Class<? extends BaseSemanticAnalyzer>> TYPE_TO_ANALYZER = new HashMap<>();

static {
Set<Class<? extends BaseSemanticAnalyzer>> analyzerClasses =
new Reflections("org.apache.hadoop.hive.ql.ddl").getSubTypesOf(BaseSemanticAnalyzer.class);
Set<Class<? extends BaseSemanticAnalyzer>> analyzerClasses1 =
new Reflections(DDL_ROOT).getSubTypesOf(BaseSemanticAnalyzer.class);
Set<Class<? extends CalcitePlanner>> analyzerClasses2 =
new Reflections(DDL_ROOT).getSubTypesOf(CalcitePlanner.class);
Set<Class<? extends BaseSemanticAnalyzer>> analyzerClasses = Sets.union(analyzerClasses1, analyzerClasses2);
for (Class<? extends BaseSemanticAnalyzer> analyzerClass : analyzerClasses) {
if (Modifier.isAbstract(analyzerClass.getModifiers())) {
continue;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.view;
package org.apache.hadoop.hive.ql.ddl.view.create;

import java.io.Serializable;
import java.util.List;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.view;
package org.apache.hadoop.hive.ql.ddl.view.create;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/** Create view DDL operation. */
package org.apache.hadoop.hive.ql.ddl.view.create;
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.view.drop;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
 * Semantic analyzer for DROP VIEW statements: resolves the target view, records the
 * read/write entities for authorization and locking, and schedules the DDL task.
 */
@DDLType(type=HiveParser.TOK_DROPVIEW)
public class DropViewAnalyzer extends BaseSemanticAnalyzer {
  public DropViewAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
  }

  @Override
  public void analyzeInternal(ASTNode root) throws SemanticException {
    ASTNode nameNode = (ASTNode) root.getChild(0);
    String viewName = getUnescapedName(nameNode);
    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;

    // A missing view is tolerated when IF EXISTS was given or the config says to ignore it;
    // otherwise getTable is asked to throw.
    boolean ignoreMissing = ifExists || HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
    Table view = getTable(viewName, !ignoreMissing);
    if (view != null) {
      // The drop takes an exclusive DDL lock on the view.
      inputs.add(new ReadEntity(view));
      outputs.add(new WriteEntity(view, WriteEntity.WriteType.DDL_EXCLUSIVE));
    }

    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), new DropViewDesc(viewName, ifExists))));
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.view;
package org.apache.hadoop.hive.ql.ddl.view.drop;

import java.io.Serializable;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.view;
package org.apache.hadoop.hive.ql.ddl.view.drop;

import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.DDLUtils;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/** Drop view DDL operation. */
package org.apache.hadoop.hive.ql.ddl.view.drop;
Original file line number Diff line number Diff line change
Expand Up @@ -16,70 +16,82 @@
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.parse;
package org.apache.hadoop.hive.ql.ddl.view.materialized.alter.rebuild;

import org.apache.hadoop.hive.metastore.api.LockState;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* MaterializedViewRebuildSemanticAnalyzer.
* Rewrites ALTER MATERIALIZED VIEW _mv_name_ REBUILD statement into
* INSERT OVERWRITE TABLE _mv_name_ _mv_query_ .
* Analyzer for alter materialized view rebuild commands.
*/
public class MaterializedViewRebuildSemanticAnalyzer extends CalcitePlanner {
@DDLType(type=HiveParser.TOK_ALTER_MATERIALIZED_VIEW_REBUILD)
public class AlterMaterializedViewRebuildAnalyzer extends CalcitePlanner {
private static final Logger LOG = LoggerFactory.getLogger(AlterMaterializedViewRebuildAnalyzer.class);

private static final Logger LOG =
LoggerFactory.getLogger(MaterializedViewRebuildSemanticAnalyzer.class);


public MaterializedViewRebuildSemanticAnalyzer(QueryState queryState) throws SemanticException {
public AlterMaterializedViewRebuildAnalyzer(QueryState queryState) throws SemanticException {
super(queryState);
}


@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
public void analyzeInternal(ASTNode root) throws SemanticException {
if (mvRebuildMode != MaterializationRebuildMode.NONE) {
super.analyzeInternal(ast);
super.analyzeInternal(root);
return;
}

String[] qualifiedTableName = getQualifiedTableName((ASTNode) ast.getChild(0));
String[] qualifiedTableName = getQualifiedTableName((ASTNode)root.getChild(0));
String dbDotTable = getDotName(qualifiedTableName);
ASTNode rewrittenAST = getRewrittenAST(qualifiedTableName, dbDotTable);

mvRebuildMode = MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD;
mvRebuildDbName = qualifiedTableName[0];
mvRebuildName = qualifiedTableName[1];

LOG.debug("Rebuilding materialized view " + dbDotTable);
super.analyzeInternal(rewrittenAST);
}

private static final String REWRITTEN_INSERT_STATEMENT = "INSERT OVERWRITE TABLE `%s`.`%s` %s";

private ASTNode getRewrittenAST(String[] qualifiedTableName, String dbDotTable) throws SemanticException {
ASTNode rewrittenAST;
// We need to go lookup the table and get the select statement and then parse it.
try {
Table tab = getTableObjectByName(dbDotTable, true);
if (!tab.isMaterializedView()) {
// Cannot rebuild not materialized view
Table table = getTableObjectByName(dbDotTable, true);
if (!table.isMaterializedView()) {
throw new SemanticException(ErrorMsg.REBUILD_NO_MATERIALIZED_VIEW);
}

// We need to use the expanded text for the materialized view, as it will contain
// the qualified table aliases, etc.
String viewText = tab.getViewExpandedText();
String viewText = table.getViewExpandedText();
if (viewText.trim().isEmpty()) {
throw new SemanticException(ErrorMsg.MATERIALIZED_VIEW_DEF_EMPTY);
}

Context ctx = new Context(queryState.getConf());
rewrittenAST = ParseUtils.parse("insert overwrite table " +
"`" + qualifiedTableName[0] + "`.`" + qualifiedTableName[1] + "` " +
viewText, ctx);
String rewrittenInsertStatement = String.format(REWRITTEN_INSERT_STATEMENT, qualifiedTableName[0],
qualifiedTableName[1], viewText);
rewrittenAST = ParseUtils.parse(rewrittenInsertStatement, ctx);
this.ctx.addRewrittenStatementContext(ctx);

if (!this.ctx.isExplainPlan() && AcidUtils.isTransactionalTable(tab)) {
// Acquire lock for the given materialized view. Only one rebuild per materialized
// view can be triggered at a given time, as otherwise we might produce incorrect
// results if incremental maintenance is triggered.
if (!this.ctx.isExplainPlan() && AcidUtils.isTransactionalTable(table)) {
// Acquire lock for the given materialized view. Only one rebuild per materialized view can be triggered at a
// given time, as otherwise we might produce incorrect results if incremental maintenance is triggered.
HiveTxnManager txnManager = getTxnMgr();
LockState state;
try {
Expand All @@ -95,11 +107,6 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
} catch (Exception e) {
throw new SemanticException(e);
}
mvRebuildMode = MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD;
mvRebuildDbName = qualifiedTableName[0];
mvRebuildName = qualifiedTableName[1];

LOG.debug("Rebuilding materialized view " + dbDotTable);
super.analyzeInternal(rewrittenAST);
return rewrittenAST;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/** Rebuild materialized view DDL operation. */
package org.apache.hadoop.hive.ql.ddl.view.materialized.alter.rebuild;
Loading