fe/src/main/java/org/apache/doris/load/Load.java (27 changes: 13 additions, 14 deletions)
@@ -22,6 +22,8 @@
 import org.apache.doris.analysis.ColumnSeparator;
 import org.apache.doris.analysis.DataDescription;
 import org.apache.doris.analysis.DeleteStmt;
+import org.apache.doris.analysis.Expr;
+import org.apache.doris.analysis.ImportColumnDesc;
 import org.apache.doris.analysis.IsNullPredicate;
 import org.apache.doris.analysis.LabelName;
 import org.apache.doris.analysis.LiteralExpr;
@@ -677,33 +679,30 @@ public static void checkAndCreateSource(Database db, DataDescription dataDescrip
 source.setColumnNames(columnNames);

 // check default value
-Map<String, Pair<String, List<String>>> assignColumnToFunction = dataDescription.getColumnToHadoopFunction();
+Map<String, Pair<String, List<String>>> columnToHadoopFunction = dataDescription.getColumnToHadoopFunction();
+List<ImportColumnDesc> parsedColumnExprList = dataDescription.getParsedColumnExprList();
+Map<String, Expr> parsedColumnExprMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+for (ImportColumnDesc importColumnDesc : parsedColumnExprList) {
+    parsedColumnExprMap.put(importColumnDesc.getColumnName(), importColumnDesc.getExpr());
+}
 for (Column column : tableSchema) {
     String columnName = column.getName();
     if (columnNames.contains(columnName)) {
         continue;
     }

-    if (assignColumnToFunction != null && assignColumnToFunction.containsKey(columnName)) {
+    if (parsedColumnExprMap.containsKey(columnName)) {
         continue;
     }

     if (column.getDefaultValue() != null || column.isAllowNull()) {
         continue;
     }

-    if (deleteFlag && !column.isKey()) {
-        List<String> args = Lists.newArrayList();
-        args.add("0");
-        Pair<String, List<String>> functionPair = new Pair<String, List<String>>("default_value", args);
-        assignColumnToFunction.put(columnName, functionPair);
-        continue;
-    }

     throw new DdlException("Column has no default value. column: " + columnName);
 }

-// check negative for sum aggreate type
+// check negative for sum aggregate type
 if (dataDescription.isNegative()) {
     for (Column column : tableSchema) {
         if (!column.isKey() && column.getAggregationType() != AggregateType.SUM) {
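
Aside on the new lookup map: parsedColumnExprMap is built with String.CASE_INSENSITIVE_ORDER, so a schema column name matches its parsed expression regardless of case. A minimal standalone sketch of that behavior (illustrative names, not the Doris classes):

```java
import java.util.Map;
import java.util.TreeMap;

public class CaseInsensitiveMapSketch {
    public static void main(String[] args) {
        // Same idea as parsedColumnExprMap above: a TreeMap keyed with
        // String.CASE_INSENSITIVE_ORDER ignores case when comparing keys.
        Map<String, String> parsedColumnExprMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        parsedColumnExprMap.put("K1", "substr(tmp_k1, 1, 2)");

        // Lookup succeeds even though the schema stores the name in lower case.
        System.out.println(parsedColumnExprMap.containsKey("k1")); // true
    }
}
```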
@@ -715,7 +714,7 @@ public static void checkAndCreateSource(Database db, DataDescription dataDescrip
 // check hll
Review comment (Contributor): hll_hash is a kind of Hadoop function, so it is correct to use columnToHadoopFunction here to check the HLL columns.

Reply (Contributor Author): Yep

 for (Column column : tableSchema) {
     if (column.getDataType() == PrimitiveType.HLL) {
-        if (assignColumnToFunction != null && !assignColumnToFunction.containsKey(column.getName())) {
+        if (columnToHadoopFunction != null && !columnToHadoopFunction.containsKey(column.getName())) {
             throw new DdlException("Hll column is not assigned. column:" + column.getName());
         }
     }
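
The check above requires every HLL column to carry an explicit column mapping (typically hll_hash, as the review thread notes). A self-contained sketch of that rule, with simplified types and an illustrative column name, not the Doris classes:

```java
import java.util.List;
import java.util.Map;

public class HllCheckSketch {
    // Every HLL column must appear in the column-to-Hadoop-function map;
    // otherwise the load is rejected, mirroring the DdlException above.
    static void checkHllColumns(List<String> hllColumns, Map<String, String> columnToHadoopFunction) {
        for (String name : hllColumns) {
            if (columnToHadoopFunction == null || !columnToHadoopFunction.containsKey(name)) {
                throw new IllegalStateException("Hll column is not assigned. column: " + name);
            }
        }
    }

    public static void main(String[] args) {
        checkHllColumns(List.of("uv_set"), Map.of("uv_set", "hll_hash")); // passes
        checkHllColumns(List.of("uv_set"), Map.of());                     // throws
    }
}
```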
@@ -727,9 +726,9 @@ public static void checkAndCreateSource(Database db, DataDescription dataDescrip
 for (String columnName : columnNames) {
     columnNameMap.put(columnName, columnName);
 }
-if (assignColumnToFunction != null) {
+if (columnToHadoopFunction != null) {
     columnToFunction = Maps.newHashMap();
-    for (Entry<String, Pair<String, List<String>>> entry : assignColumnToFunction.entrySet()) {
+    for (Entry<String, Pair<String, List<String>>> entry : columnToHadoopFunction.entrySet()) {
         String mappingColumnName = entry.getKey();
         if (!nameToTableColumn.containsKey(mappingColumnName)) {
             throw new DdlException("Mapping column is not in table. column: " + mappingColumnName);