Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,24 @@ public ArrayList<SlotDescriptor> getMaterializedSlots() {
return result;
}

/**
 * Returns the ids of every slot in this tuple that is currently marked
 * as materialized, preserving the order of {@code slots}.
 */
public ArrayList<SlotId> getMaterializedSlotIds() {
    ArrayList<SlotId> materializedIds = Lists.newArrayList();
    for (SlotDescriptor slotDesc : slots) {
        if (!slotDesc.isMaterialized()) {
            continue;
        }
        materializedIds.add(slotDesc.getId());
    }
    return materializedIds;
}

/**
 * Returns the ids of all slots in this tuple, materialized or not,
 * in the order they appear in {@code slots}.
 */
public ArrayList<SlotId> getAllSlotIds() {
    ArrayList<SlotId> allIds = Lists.newArrayList();
    for (int i = 0; i < slots.size(); i++) {
        allIds.add(slots.get(i).getId());
    }
    return allIds;
}

/**
* Return slot descriptor corresponding to column referenced in the context
* of tupleDesc, or null if no such reference exists.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,12 +94,12 @@ public HashJoinNode(PlanNodeId id, PlanNode outer, PlanNode inner, TableRef inne

if (joinOp.equals(JoinOperator.LEFT_ANTI_JOIN) || joinOp.equals(JoinOperator.LEFT_SEMI_JOIN)
|| joinOp.equals(JoinOperator.NULL_AWARE_LEFT_ANTI_JOIN)) {
tupleIds.addAll(outer.getTupleIds());
tupleIds.addAll(outer.getOutputTupleIds());
} else if (joinOp.equals(JoinOperator.RIGHT_ANTI_JOIN) || joinOp.equals(JoinOperator.RIGHT_SEMI_JOIN)) {
tupleIds.addAll(inner.getTupleIds());
tupleIds.addAll(inner.getOutputTupleIds());
} else {
tupleIds.addAll(outer.getTupleIds());
tupleIds.addAll(inner.getTupleIds());
tupleIds.addAll(outer.getOutputTupleIds());
tupleIds.addAll(inner.getOutputTupleIds());
}

for (Expr eqJoinPredicate : eqJoinConjuncts) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,12 +77,12 @@ public JoinNodeBase(PlanNodeId id, String planNodeName, StatisticalType statisti

joinOp = innerRef.getJoinOp();
if (joinOp.equals(JoinOperator.FULL_OUTER_JOIN)) {
nullableTupleIds.addAll(outer.getTupleIds());
nullableTupleIds.addAll(inner.getTupleIds());
nullableTupleIds.addAll(outer.getOutputTupleIds());
nullableTupleIds.addAll(inner.getOutputTupleIds());
} else if (joinOp.equals(JoinOperator.LEFT_OUTER_JOIN)) {
nullableTupleIds.addAll(inner.getTupleIds());
nullableTupleIds.addAll(inner.getOutputTupleIds());
} else if (joinOp.equals(JoinOperator.RIGHT_OUTER_JOIN)) {
nullableTupleIds.addAll(outer.getTupleIds());
nullableTupleIds.addAll(outer.getOutputTupleIds());
}
this.isMark = this.innerRef != null && innerRef.isMark();
}
Expand Down
45 changes: 45 additions & 0 deletions fe/fe-core/src/main/java/org/apache/doris/planner/ScanNode.java
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@
import org.apache.doris.thrift.TScanRangeLocations;

import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Range;
Expand All @@ -64,6 +65,7 @@
import org.apache.logging.log4j.Logger;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
Expand All @@ -84,6 +86,9 @@ public abstract class ScanNode extends PlanNode {
protected List<TScanRangeLocations> scanRangeLocations = Lists.newArrayList();
protected PartitionInfo partitionsInfo = null;

// create a mapping between output slot's id and project expr
Map<SlotId, Expr> outputSlotToProjectExpr = new HashMap<>();

public ScanNode(PlanNodeId id, TupleDescriptor desc, String planNodeName, StatisticalType statisticalType) {
super(id, desc.getId().asList(), planNodeName, statisticalType);
this.desc = desc;
Expand Down Expand Up @@ -603,6 +608,7 @@ public void setOutputSmap(ExprSubstitutionMap smap, Analyzer analyzer) {
}
newRhs.add(new SlotRef(slotDesc));
allOutputSlotIds.add(slotDesc.getId());
outputSlotToProjectExpr.put(slotDesc.getId(), rhsExpr);
} else {
newRhs.add(rhs.get(i));
}
Expand All @@ -614,6 +620,45 @@ public void setOutputSmap(ExprSubstitutionMap smap, Analyzer analyzer) {
}
}

@Override
public void initOutputSlotIds(Set<SlotId> requiredSlotIdSet, Analyzer analyzer) {
    // Nothing to do unless this scan node projects into an output tuple and the
    // caller told us which slots are actually required downstream.
    if (outputTupleDesc == null || requiredSlotIdSet == null) {
        return;
    }
    Preconditions.checkNotNull(outputSmap);
    ArrayList<SlotId> materializedSlotIds = outputTupleDesc.getMaterializedSlotIds();
    // projectList holds one expr per materialized slot, so it can never be larger.
    Preconditions.checkState(projectList != null && projectList.size() <= materializedSlotIds.size(),
            "projectList's size should be less than or equal to materializedSlotIds's size");
    // If projectList is already shorter than the materialized slots, it is stale
    // and must be rebuilt even if no new slot gets materialized below.
    boolean hasNewSlot = projectList.size() < materializedSlotIds.size();

    // Materialize any required slot of this tuple that is not materialized yet,
    // and remember its source expr (looked up in outputSmap) for the project list.
    ArrayList<SlotId> allSlotIds = outputTupleDesc.getAllSlotIds();
    for (SlotId slotId : requiredSlotIdSet) {
        if (materializedSlotIds.contains(slotId) || !allSlotIds.contains(slotId)) {
            continue;
        }
        SlotDescriptor slot = outputTupleDesc.getSlot(slotId.asInt());
        for (Expr expr : outputSmap.getRhs()) {
            // Compare with equals(): SlotId is an object type, and logically equal
            // ids are not guaranteed to be the same instance (== compared references).
            if (expr instanceof SlotRef && ((SlotRef) expr).getSlotId().equals(slotId)) {
                slot.setIsMaterialized(true);
                outputSlotToProjectExpr.put(slotId, expr.getSrcSlotRef());
                hasNewSlot = true;
                // One matching SlotRef determines the project expr; no need to keep scanning.
                break;
            }
        }
    }

    if (hasNewSlot) {
        // Recreate the project list from the (possibly extended) materialized slot set,
        // keeping it aligned one-to-one with the materialized slots.
        projectList.clear();
        materializedSlotIds = outputTupleDesc.getMaterializedSlotIds();
        for (SlotId slotId : materializedSlotIds) {
            projectList.add(outputSlotToProjectExpr.get(slotId));
        }
    }
}

public List<TupleId> getOutputTupleIds() {
if (outputTupleDesc != null) {
return Lists.newArrayList(outputTupleDesc.getId());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ public class JoinCostEvaluationTest {
public void setUp() {
new Expectations() {
{
node.getTupleIds();
node.getOutputTupleIds();
result = Lists.newArrayList();
node.getTblRefIds();
result = Lists.newArrayList();
Expand Down Expand Up @@ -114,7 +114,7 @@ public void testConstructHashTableSpace() {
double nodeArrayLen = 6144;
new Expectations() {
{
node.getTupleIds();
node.getOutputTupleIds();
result = new ArrayList<>(Collections.nCopies(rhsNodeTupleIdNum, 0));
}
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -139,9 +139,9 @@ public void testJoinReorderWithTwoTuple1(@Injectable PlannerContext context,
result = eqSlot2;
eqSlot2.isBoundByTupleIds(new ArrayList<>());
result = true;
scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = Lists.newArrayList();
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = Lists.newArrayList();
scanNode1.getOutputSmap();
result = null;
Expand Down Expand Up @@ -239,9 +239,9 @@ public void testJoinReorderWithTwoTuple2(@Injectable PlannerContext context,
result = eqSlot2;
eqSlot2.isBoundByTupleIds(new ArrayList<>());
result = true;
scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = Lists.newArrayList();
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = Lists.newArrayList();
scanNode1.getOutputSmap();
result = null;
Expand Down Expand Up @@ -337,9 +337,9 @@ public void testJoinReorderWithTwoTuple3(@Injectable PlannerContext context,
result = eqSlot2;
eqSlot2.isBoundByTupleIds(new ArrayList<>());
result = true;
scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = Lists.newArrayList();
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = Lists.newArrayList();
scanNode1.getOutputSmap();
result = null;
Expand Down Expand Up @@ -487,11 +487,11 @@ public void testKeepRightTableRefOnLeftJoin(@Injectable PlannerContext context,
result = eqT1Slot1;
eqBinaryPredicate3.getChild(1);
result = eqT3Slot3;
scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = tupleIds1;
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = tupleIds2;
scanNode3.getTupleIds();
scanNode3.getOutputTupleIds();
result = tupleId3;
scanNode1.getOutputSmap();
result = null;
Expand Down Expand Up @@ -649,11 +649,11 @@ public void testKeepRightTableRefOnRightJoin(@Injectable PlannerContext context,
result = eqT1Slot1;
eqBinaryPredicate3.getChild(1);
result = eqT3Slot3;
scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = tupleIds1;
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = tupleIds2;
scanNode3.getTupleIds();
scanNode3.getOutputTupleIds();
result = tupleId3;
scanNode1.getOutputSmap();
result = null;
Expand Down Expand Up @@ -830,13 +830,13 @@ public void testMultiInnerJoinReorderAvoidCrossJoin(@Injectable PlannerContext c
scanNode4.getTblRefIds();
result = tupleIds4;

scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = tupleIds1;
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = tupleIds2;
scanNode3.getTupleIds();
scanNode3.getOutputTupleIds();
result = tupleIds3;
scanNode4.getTupleIds();
scanNode4.getOutputTupleIds();
result = tupleIds4;
scanNode1.getOutputSmap();
result = null;
Expand Down Expand Up @@ -1037,13 +1037,13 @@ public void testMultiInnerJoinMultiJoinPredicateReorder(@Injectable PlannerConte
scanNode4.getTblRefIds();
result = tupleIds4;

scanNode1.getTupleIds();
scanNode1.getOutputTupleIds();
result = tupleIds1;
scanNode2.getTupleIds();
scanNode2.getOutputTupleIds();
result = tupleIds2;
scanNode3.getTupleIds();
scanNode3.getOutputTupleIds();
result = tupleIds3;
scanNode4.getTupleIds();
scanNode4.getOutputTupleIds();
result = tupleIds4;
scanNode1.getOutputSmap();
result = null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,6 @@
-- !select4 --
0.0

-- !select5 --
3

Original file line number Diff line number Diff line change
Expand Up @@ -455,4 +455,69 @@ suite("test_inlineview_with_project") {
`t1`.`plate_id` = `t2`.`material_id`) t5 ON
1 = 1
)res;"""

sql """DROP TABLE IF EXISTS `dr_user_test_t1`;"""
sql """CREATE TABLE `dr_user_test_t1` (
`caseId` varchar(500) NULL
) ENGINE=OLAP
UNIQUE KEY(`caseId`)
COMMENT 'OLAP'
DISTRIBUTED BY HASH(`caseId`) BUCKETS 16
PROPERTIES (
"replication_allocation" = "tag.location.default: 1"
);"""

sql """DROP TABLE IF EXISTS `dr_user_test_t2`;"""
sql """CREATE TABLE `dr_user_test_t2` (
`id` varchar(500) NULL COMMENT 'id',
`caseId` varchar(500) NULL,
`content` text NULL,
`timestamp` datetime NULL
) ENGINE=OLAP
UNIQUE KEY(`id`)
COMMENT 'OLAP'
DISTRIBUTED BY HASH(`id`) BUCKETS 16
PROPERTIES (
"replication_allocation" = "tag.location.default: 1"
);"""

sql """insert into dr_user_test_t1 values('1'),('2'),('3');"""
sql """insert into dr_user_test_t2 values('1','1','1','2020-02-02 22:22:22'), ('2','2','2','2020-02-02 22:22:22'), ('3','3','3','2020-02-02 22:22:22');"""

qt_select5 """
SELECT COUNT(*)
FROM (WITH test_01 AS
(SELECT caseId,
count(judgementDate_labelObject)
FROM
(SELECT CASE_COLUMN_TABLE.caseId AS caseId ,
`judgementDate_labelObject`
FROM
(SELECT CASE_ID_TABLE.caseId ,
JSON_OBJECT('id', `judgementDate_TABLE`.`judgementDateId`, 'content', `judgementDate_TABLE`.`judgementDate`) AS `judgementDate_labelObject`
FROM
(SELECT DISTINCT caseId
FROM dr_user_test_t1) CASE_ID_TABLE
LEFT JOIN
(SELECT caseId,
id AS `judgementDateId`,
(CASE
WHEN `timestamp` IS NOT NULL THEN
to_date(`timestamp`)
ELSE content END) AS `judgementDate`
FROM dr_user_test_t2) `judgementDate_TABLE`
ON CASE_ID_TABLE.caseId = `judgementDate_TABLE`.caseId
LEFT JOIN
(SELECT caseId,
id AS `xx`,
content AS `xxx`
FROM dr_user_test_t2) `xxxx`
ON CASE_ID_TABLE.caseId = `xxxx`.caseId) CASE_COLUMN_TABLE) AGG_RESULT
GROUP BY caseId)
SELECT caseId
FROM test_01 ) TOTAL;
"""

sql """DROP TABLE IF EXISTS `dr_user_test_t1`;"""
sql """DROP TABLE IF EXISTS `dr_user_test_t2`;"""
}