diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java
index 7f787bba68f3a7..da84fc5c4c3c5b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StatementBase.java
@@ -51,6 +51,8 @@ public abstract class StatementBase implements ParseNode {
     private OriginStatement origStmt;
 
+    private UserIdentity userInfo;
+
     protected StatementBase() { }
 
     /**
@@ -169,6 +171,14 @@ public OriginStatement getOrigStmt() {
         return origStmt;
     }
 
+    public UserIdentity getUserInfo() {
+        return userInfo;
+    }
+
+    public void setUserInfo(UserIdentity userInfo) {
+        this.userInfo = userInfo;
+    }
+
     /**
      * Resets the internal analysis state of this node.
      * For easier maintenance, class members that need to be reset are grouped into
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java b/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
index 0eaf61ec649562..e3ea0901aa8a3b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/FeMetaVersion.java
@@ -196,6 +196,8 @@ public final class FeMetaVersion {
     public static final int VERSION_91 = 91;
     // for mysql external table support resource
     public static final int VERSION_92 = 92;
+    // jira: 4863, for load job to support UDF
+    public static final int VERSION_93 = 93;
     // note: when incrementing the meta version, assign the latest version to VERSION_CURRENT
-    public static final int VERSION_CURRENT = VERSION_92;
+    public static final int VERSION_CURRENT = VERSION_93;
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BrokerLoadJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BrokerLoadJob.java
index a253dbe919c8ed..6e71c56adada56 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BrokerLoadJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BrokerLoadJob.java
@@ -18,6 +18,7 @@
 package org.apache.doris.load.loadv2;
 
 import org.apache.doris.analysis.BrokerDesc;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.OlapTable;
@@ -70,9 +71,10 @@ public BrokerLoadJob() {
         this.jobType = EtlJobType.BROKER;
     }
 
-    public BrokerLoadJob(long dbId, String label, BrokerDesc brokerDesc, OriginStatement originStmt)
+    public BrokerLoadJob(long dbId, String label, BrokerDesc brokerDesc,
+                         OriginStatement originStmt, UserIdentity userInfo)
             throws MetaNotFoundException {
-        super(dbId, label, originStmt);
+        super(dbId, label, originStmt, userInfo);
         this.timeoutSecond = Config.broker_load_default_timeout_second;
         this.brokerDesc = brokerDesc;
         this.jobType = EtlJobType.BROKER;
@@ -194,7 +196,8 @@ brokerFileGroups, getDeadlineMs(), execMemLimit, strictMode, transactionId,
                     this, timezone, timeoutSecond);
             UUID uuid = UUID.randomUUID();
             TUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
-            task.init(loadId, attachment.getFileStatusByTable(aggKey), attachment.getFileNumByTable(aggKey));
+            task.init(loadId, attachment.getFileStatusByTable(aggKey),
+                    attachment.getFileNumByTable(aggKey), getUserInfo());
             idToTasks.put(task.getSignature(), task);
             // idToTasks contains previous LoadPendingTasks, so idToTasks is just used to save all tasks.
             // use newLoadingTasks to save the newly created loading tasks and submit them later.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BulkLoadJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BulkLoadJob.java
index 1d649c65627bc0..d9edca1b67a084 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BulkLoadJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/BulkLoadJob.java
@@ -22,6 +22,7 @@
 import org.apache.doris.analysis.LoadStmt;
 import org.apache.doris.analysis.SqlParser;
 import org.apache.doris.analysis.SqlScanner;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.AuthorizationInfo;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Database;
@@ -73,6 +74,8 @@ public abstract class BulkLoadJob extends LoadJob {
     // the expr of columns will be reanalyzed when the log is replayed
     private OriginStatement originStmt;
 
+    private UserIdentity userInfo;
+
     // include broker desc and data desc
     protected BrokerFileGroupAggInfo fileGroupAggInfo = new BrokerFileGroupAggInfo();
     protected List<TabletCommitInfo> commitInfos = Lists.newArrayList();
@@ -86,10 +89,11 @@ public BulkLoadJob() {
         super();
     }
 
-    public BulkLoadJob(long dbId, String label, OriginStatement originStmt) throws MetaNotFoundException {
+    public BulkLoadJob(long dbId, String label, OriginStatement originStmt, UserIdentity userInfo) throws MetaNotFoundException {
         super(dbId, label);
         this.originStmt = originStmt;
         this.authorizationInfo = gatherAuthInfo();
+        this.userInfo = userInfo;
 
         if (ConnectContext.get() != null) {
             SessionVariable var = ConnectContext.get().getSessionVariable();
@@ -113,11 +117,11 @@ public static BulkLoadJob fromLoadStmt(LoadStmt stmt) throws DdlException {
         switch (stmt.getEtlJobType()) {
             case BROKER:
                 bulkLoadJob = new BrokerLoadJob(db.getId(), stmt.getLabel().getLabelName(),
-                        stmt.getBrokerDesc(), stmt.getOrigStmt());
+                        stmt.getBrokerDesc(), stmt.getOrigStmt(), stmt.getUserInfo());
                 break;
             case SPARK:
                 bulkLoadJob = new SparkLoadJob(db.getId(), stmt.getLabel().getLabelName(),
-                        stmt.getResourceDesc(), stmt.getOrigStmt());
+                        stmt.getResourceDesc(), stmt.getOrigStmt(), stmt.getUserInfo());
                 break;
             case MINI:
             case DELETE:
@@ -296,6 +300,7 @@ public void write(DataOutput out) throws IOException {
         super.write(out);
         brokerDesc.write(out);
         originStmt.write(out);
+        userInfo.write(out);
 
         out.writeInt(sessionVariables.size());
         for (Map.Entry<String, String> entry : sessionVariables.entrySet()) {
@@ -325,6 +330,11 @@ public void readFields(DataInput in) throws IOException {
         // The reason is that it will throw MetaNotFoundException when the tableId could not be found by tableName.
         // The origin stmt will be analyzed after the replay is completed.
+        if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_93) {
+            userInfo = UserIdentity.read(in);
+        } else {
+            userInfo = new UserIdentity("", "");
+        }
         if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_66) {
             int size = in.readInt();
             for (int i = 0; i < size; i++) {
@@ -338,4 +348,11 @@ public void readFields(DataInput in) throws IOException {
         }
     }
 
+    public UserIdentity getUserInfo() {
+        return userInfo;
+    }
+
+    public void setUserInfo(UserIdentity userInfo) {
+        this.userInfo = userInfo;
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadLoadingTask.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadLoadingTask.java
index caa6081e7073e7..b08f36622fd509 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadLoadingTask.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadLoadingTask.java
@@ -18,6 +18,7 @@
 package org.apache.doris.load.loadv2;
 
 import org.apache.doris.analysis.BrokerDesc;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.common.LoadException;
@@ -83,9 +84,10 @@ public LoadLoadingTask(Database db, OlapTable table,
         this.timeoutS = timeoutS;
     }
 
-    public void init(TUniqueId loadId, List<List<TBrokerFileStatus>> fileStatusList, int fileNum) throws UserException {
+    public void init(TUniqueId loadId, List<List<TBrokerFileStatus>> fileStatusList, int fileNum, UserIdentity userInfo) throws UserException {
         this.loadId = loadId;
-        planner = new LoadingTaskPlanner(callback.getCallbackId(), txnId, db.getId(), table, brokerDesc, fileGroups, strictMode, timezone, this.timeoutS);
+        planner = new LoadingTaskPlanner(callback.getCallbackId(), txnId, db.getId(), table,
+                brokerDesc, fileGroups, strictMode, timezone, this.timeoutS, userInfo);
         planner.plan(loadId, fileStatusList, fileNum);
     }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java
index ab46d8caeb71cd..c728701f2bdd28 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java
@@ -22,6 +22,7 @@
 import org.apache.doris.analysis.DescriptorTable;
 import org.apache.doris.analysis.SlotDescriptor;
 import org.apache.doris.analysis.TupleDescriptor;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.OlapTable;
@@ -32,6 +33,7 @@
 import org.apache.doris.common.UserException;
 import org.apache.doris.common.util.DebugUtil;
 import org.apache.doris.load.BrokerFileGroup;
+import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.planner.BrokerScanNode;
 import org.apache.doris.planner.DataPartition;
 import org.apache.doris.planner.OlapTableSink;
@@ -65,7 +67,7 @@ public class LoadingTaskPlanner {
     private final List<BrokerFileGroup> fileGroups;
     private final boolean strictMode;
     private final long timeoutS; // timeout of load job, in seconds
-
+    private UserIdentity userInfo;
     // Something useful
     // ConnectContext here is just a dummy object to avoid some NPE problem, like ctx.getDatabase()
     private Analyzer analyzer = new Analyzer(Catalog.getCurrentCatalog(), new ConnectContext());
@@ -79,7 +81,8 @@ public class LoadingTaskPlanner {
 
     public LoadingTaskPlanner(Long loadJobId, long txnId, long dbId, OlapTable table,
                               BrokerDesc brokerDesc, List<BrokerFileGroup> brokerFileGroups,
-                              boolean strictMode, String timezone, long timeoutS) {
+                              boolean strictMode, String timezone, long timeoutS,
+                              UserIdentity userInfo) {
         this.loadJobId = loadJobId;
         this.txnId = txnId;
         this.dbId = dbId;
@@ -89,14 +92,13 @@ public LoadingTaskPlanner(Long loadJobId, long txnId, long dbId, OlapTable table
         this.strictMode = strictMode;
         this.analyzer.setTimezone(timezone);
         this.timeoutS = timeoutS;
-
-        /*
-         * TODO(cmy): UDF currently belongs to a database. Therefore, before using UDF,
-         * we need to check whether the user has corresponding permissions on this database.
-         * But here we have lost user information and therefore cannot check permissions.
-         * So here we first prohibit users from using UDF in load. If necessary, improve it later.
-         */
-        this.analyzer.setUDFAllowed(false);
+        this.userInfo = userInfo;
+        if (Catalog.getCurrentCatalog().getAuth().checkDbPriv(userInfo,
+                Catalog.getCurrentCatalog().getDb(dbId).getFullName(), PrivPredicate.SELECT)) {
+            this.analyzer.setUDFAllowed(true);
+        } else {
+            this.analyzer.setUDFAllowed(false);
+        }
     }
 
     public void plan(TUniqueId loadId, List<List<TBrokerFileStatus>> fileStatusesList, int filesAdded)
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkLoadJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkLoadJob.java
index 9297295748a407..3d3f7b34b07b56 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkLoadJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/SparkLoadJob.java
@@ -25,6 +25,7 @@
 import org.apache.doris.analysis.SlotDescriptor;
 import org.apache.doris.analysis.SlotRef;
 import org.apache.doris.analysis.TupleDescriptor;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Database;
@@ -145,9 +146,10 @@ public SparkLoadJob() {
         jobType = EtlJobType.SPARK;
     }
 
-    public SparkLoadJob(long dbId, String label, ResourceDesc resourceDesc, OriginStatement originStmt)
+    public SparkLoadJob(long dbId, String label, ResourceDesc resourceDesc,
+                        OriginStatement originStmt, UserIdentity userInfo)
             throws MetaNotFoundException {
-        super(dbId, label, originStmt);
+        super(dbId, label, originStmt, userInfo);
         this.resourceDesc = resourceDesc;
         timeoutSecond = Config.spark_load_default_timeout_second;
         jobType = EtlJobType.SPARK;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java
index b7a905920fa7aa..e2908c98b6e9ce 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ConnectProcessor.java
@@ -186,6 +186,7 @@ private void handleQuery() {
             }
             parsedStmt = stmts.get(i);
             parsedStmt.setOrigStmt(new OriginStatement(originStmt, i));
+            parsedStmt.setUserInfo(ctx.getCurrentUserIdentity());
             executor = new StmtExecutor(ctx, parsedStmt);
             ctx.setExecutor(executor);
             executor.execute();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
index 6cd135faa009ed..b9ac363144f342 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java
@@ -404,6 +404,7 @@ public void analyze(TQueryOptions tQueryOptions) throws UserException {
             try {
                 parsedStmt = SqlParserUtils.getStmt(parser, originStmt.idx);
                 parsedStmt.setOrigStmt(originStmt);
+                parsedStmt.setUserInfo(context.getCurrentUserIdentity());
             } catch (Error e) {
                 LOG.info("error happened when parsing stmt {}, id: {}", originStmt, context.getStmtId(), e);
                 throw new AnalysisException("sql parsing error, please check your sql");
diff --git a/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/BrokerLoadJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/BrokerLoadJobTest.java
index 4b0286c0f68316..10a573223fd233 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/BrokerLoadJobTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/BrokerLoadJobTest.java
@@ -19,14 +19,17 @@
 import org.apache.doris.analysis.BrokerDesc;
 import org.apache.doris.analysis.DataDescription;
-import org.apache.doris.analysis.ImportColumnDesc;
 import org.apache.doris.analysis.LabelName;
 import org.apache.doris.analysis.LoadStmt;
-import org.apache.doris.analysis.SlotRef;
+import org.apache.doris.analysis.Analyzer;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Database;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.Table;
+import org.apache.doris.catalog.Column;
+import org.apache.doris.catalog.PrimitiveType;
+import org.apache.doris.catalog.BrokerTable;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.MetaNotFoundException;
 import org.apache.doris.common.jmockit.Deencapsulation;
@@ -38,6 +41,9 @@
 import org.apache.doris.load.Load;
 import org.apache.doris.load.Source;
 import org.apache.doris.metric.MetricRepo;
+import org.apache.doris.planner.BrokerScanNode;
+import org.apache.doris.planner.OlapTableSink;
+import org.apache.doris.planner.PlanFragment;
 import org.apache.doris.task.MasterTaskExecutor;
 import org.apache.doris.transaction.TransactionState;
@@ -52,12 +58,16 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.UUID;
 
 import mockit.Expectations;
 import mockit.Injectable;
 import mockit.Mock;
 import mockit.MockUp;
 import mockit.Mocked;
+import org.apache.doris.thrift.TUniqueId;
 
 public class BrokerLoadJobTest {
@@ -323,6 +333,48 @@ public void testPendingTaskOnFinished(@Injectable BrokerPendingTaskAttachment attachment,
         Assert.assertEquals(3, idToTasks.size());
     }
 
+    @Test
+    public void testPendingTaskOnFinishedWithUserInfo(@Mocked BrokerPendingTaskAttachment attachment,
+                                                      @Mocked Catalog catalog,
+                                                      @Injectable BrokerDesc brokerDesc,
+                                                      @Injectable LoadTaskCallback callback,
+                                                      @Injectable Database database,
+                                                      @Injectable FileGroupAggKey aggKey,
+                                                      @Mocked OlapTable olapTable,
+                                                      @Mocked PlanFragment sinkFragment,
+                                                      @Mocked OlapTableSink olapTableSink,
+                                                      @Mocked BrokerScanNode scanNode) throws Exception {
+        List<Column> schema = new ArrayList<>();
+        schema.add(new Column("a", PrimitiveType.BIGINT));
+        Map<String, String> properties = new HashMap<>();
+        properties.put("broker_name", "test");
+        properties.put("path", "hdfs://www.test.com");
+        BrokerTable brokerTable = new BrokerTable(123L, "test", schema, properties);
+        BrokerFileGroup brokerFileGroup = new BrokerFileGroup(brokerTable);
+        List<Long> partitionIds = new ArrayList<>();
+        partitionIds.add(123L);
+        Deencapsulation.setField(brokerFileGroup, "partitionIds", partitionIds);
+        List<BrokerFileGroup> fileGroups = Lists.newArrayList();
+        fileGroups.add(brokerFileGroup);
+        UUID uuid = UUID.randomUUID();
+        TUniqueId loadId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
+        LoadLoadingTask task = new LoadLoadingTask(database, olapTable, brokerDesc, fileGroups,
+                100, 100, false, 100, callback, "", 100);
+        try {
+            UserIdentity userInfo = new UserIdentity("root", "localhost");
+            userInfo.setIsAnalyzed();
+            task.init(loadId,
+                    attachment.getFileStatusByTable(aggKey),
+                    attachment.getFileNumByTable(aggKey),
+                    userInfo);
+            LoadingTaskPlanner planner = Deencapsulation.getField(task, "planner");
+            Analyzer analyzer = Deencapsulation.getField(planner, "analyzer");
+            Assert.assertFalse(analyzer.isUDFAllowed());
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
     @Test
     public void testLoadingTaskOnFinishedWithUnfinishedTask(@Injectable BrokerLoadingTaskAttachment attachment,
                                                             @Injectable LoadTask loadTask1,
diff --git a/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/SparkLoadJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/SparkLoadJobTest.java
index 7f263d7a1615a0..69538b88640647 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/SparkLoadJobTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/load/loadv2/SparkLoadJobTest.java
@@ -26,6 +26,7 @@
 import org.apache.doris.analysis.LabelName;
 import org.apache.doris.analysis.LoadStmt;
 import org.apache.doris.analysis.ResourceDesc;
+import org.apache.doris.analysis.UserIdentity;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Database;
@@ -217,7 +218,7 @@ public void testExecute(@Mocked Catalog catalog, @Mocked SparkLoadPendingTask pendingTask) throws Exception {
         };
 
         ResourceDesc resourceDesc = new ResourceDesc(resourceName, Maps.newHashMap());
-        SparkLoadJob job = new SparkLoadJob(dbId, label, resourceDesc, new OriginStatement(originStmt, 0));
+        SparkLoadJob job = new SparkLoadJob(dbId, label, resourceDesc, new OriginStatement(originStmt, 0), new UserIdentity("root", "0.0.0.0"));
         job.execute();
 
         // check transaction id and id to tasks
@@ -228,7 +229,7 @@
     @Test
     public void testOnPendingTaskFinished(@Mocked Catalog catalog, @Injectable String originStmt) throws MetaNotFoundException {
         ResourceDesc resourceDesc = new ResourceDesc(resourceName, Maps.newHashMap());
-        SparkLoadJob job = new SparkLoadJob(dbId, label, resourceDesc, new OriginStatement(originStmt, 0));
+        SparkLoadJob job = new SparkLoadJob(dbId, label, resourceDesc, new OriginStatement(originStmt, 0), new UserIdentity("root", "0.0.0.0"));
         SparkPendingTaskAttachment attachment = new SparkPendingTaskAttachment(pendingTaskId);
         attachment.setAppId(appId);
         attachment.setOutputPath(etlOutputPath);
@@ -247,7 +248,7 @@ private SparkLoadJob getEtlStateJob(String originStmt) throws MetaNotFoundException {
         sparkConfigs.put("spark.master", "yarn");
         sparkConfigs.put("spark.submit.deployMode", "cluster");
         sparkConfigs.put("spark.hadoop.yarn.resourcemanager.address", "127.0.0.1:9999");
-        SparkLoadJob job = new SparkLoadJob(dbId, label, null, new OriginStatement(originStmt, 0));
+        SparkLoadJob job = new SparkLoadJob(dbId, label, null, new OriginStatement(originStmt, 0), new UserIdentity("root", "0.0.0.0"));
         job.state = JobState.ETL;
         job.maxFilterRatio = 0.15;
         job.transactionId = transactionId;
diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/ConnectProcessorTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/ConnectProcessorTest.java
index 678751b6088114..11bf6f202e465e 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/ConnectProcessorTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/ConnectProcessorTest.java
@@ -312,6 +312,25 @@ public void testQuery(@Mocked StmtExecutor executor) throws Exception {
         Assert.assertEquals(MysqlCommand.COM_QUERY, myContext.getCommand());
     }
 
+    @Test
+    public void testQueryWithUserInfo(@Mocked StmtExecutor executor) throws Exception {
+        ConnectContext ctx = initMockContext(mockChannel(queryPacket), AccessTestUtil.fetchAdminCatalog());
+
+        ConnectProcessor processor = new ConnectProcessor(ctx);
+
+        // Mock statement executor
+        new Expectations() {
+            {
+                executor.getQueryStatisticsForAuditLog();
+                minTimes = 0;
+                result = statistics;
+            }
+        };
+        processor.processOnce();
+        StmtExecutor er = Deencapsulation.getField(processor, "executor");
+        Assert.assertNotNull(er.getParsedStmt().getUserInfo());
+    }
+
     @Test
     public void testQueryFail(@Mocked StmtExecutor executor) throws Exception {
         ConnectContext ctx = initMockContext(mockChannel(queryPacket), AccessTestUtil.fetchAdminCatalog());
diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java
index c87bbf6cbc3aa7..37329c4fce59c6 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/StmtExecutorTest.java
@@ -28,6 +28,7 @@
 import org.apache.doris.analysis.ShowAuthorStmt;
 import org.apache.doris.analysis.ShowStmt;
 import org.apache.doris.analysis.SqlParser;
+import org.apache.doris.analysis.StatementBase;
 import org.apache.doris.analysis.UseStmt;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.common.DdlException;
@@ -526,6 +527,16 @@ public void testSet(@Mocked SetStmt setStmt, @Mocked SqlParser parser, @Mocked SetExecutor executor) throws Exception {
         Assert.assertEquals(QueryState.MysqlStateType.OK, state.getStateType());
     }
 
+    @Test
+    public void testStmtWithUserInfo(@Mocked StatementBase stmt, @Mocked ConnectContext context) throws Exception {
+        StmtExecutor stmtExecutor = new StmtExecutor(ctx, stmt);
+        Deencapsulation.setField(stmtExecutor, "parsedStmt", null);
+        Deencapsulation.setField(stmtExecutor, "originStmt", new OriginStatement("show databases;", 1));
+        stmtExecutor.execute();
+        StatementBase newStmt = (StatementBase) Deencapsulation.getField(stmtExecutor, "parsedStmt");
+        Assert.assertNotNull(newStmt.getUserInfo());
+    }
+
     @Test
     public void testSetFail(@Mocked SetStmt setStmt, @Mocked SqlParser parser, @Mocked SetExecutor executor) throws Exception {
         new Expectations() {
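
Note for readers following the patch from outside the codebase: the heart of the change is the permission gate added to LoadingTaskPlanner, which now receives the submitting user's identity and enables UDFs only when that user holds SELECT privilege on the target database. The standalone sketch below mirrors only that control flow; UserIdentity, AuthChecker, and Analyzer here are simplified stand-ins for the Doris classes of the same role, not the actual implementations.

// Minimal, self-contained sketch of the UDF gating logic added in LoadingTaskPlanner.
// The nested types are hypothetical stand-ins; only the decision mirrors the patch.
public class UdfGateSketch {
    // Stand-in for org.apache.doris.analysis.UserIdentity
    record UserIdentity(String user, String host) {}

    // Stand-in for the check done via Catalog.getAuth().checkDbPriv(..., PrivPredicate.SELECT)
    interface AuthChecker {
        boolean hasSelectPriv(UserIdentity user, String dbName);
    }

    // Stand-in for the relevant slice of org.apache.doris.analysis.Analyzer
    static class Analyzer {
        private boolean udfAllowed;
        void setUDFAllowed(boolean allowed) { this.udfAllowed = allowed; }
        boolean isUDFAllowed() { return udfAllowed; }
    }

    // Mirrors the new constructor logic: UDFs are allowed in a load job only when
    // the submitting user has SELECT privilege on the database being loaded.
    static Analyzer planWithUdfGate(AuthChecker auth, UserIdentity user, String dbName) {
        Analyzer analyzer = new Analyzer();
        analyzer.setUDFAllowed(auth.hasSelectPriv(user, dbName));
        return analyzer;
    }

    public static void main(String[] args) {
        AuthChecker auth = (u, db) -> "root".equals(u.user());
        System.out.println(planWithUdfGate(auth, new UserIdentity("root", "%"), "db1").isUDFAllowed());  // true
        System.out.println(planWithUdfGate(auth, new UserIdentity("guest", "%"), "db1").isUDFAllowed()); // false
    }
}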
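The other pattern worth calling out is the version-gated deserialization in BulkLoadJob.readFields: the new field is written unconditionally, the meta version is bumped to VERSION_93, and on replay the field is read only when the journal image is at least that version, with a default otherwise. A hedged, self-contained sketch of that pattern (simplified serialization, not the actual Doris code, which reads a UserIdentity rather than a string):

import java.io.*;

// Sketch of the backward-compatible read: a field added at VERSION_93 is only
// deserialized from images new enough to contain it; older images get a default.
public class VersionGatedReadSketch {
    static final int VERSION_93 = 93;

    static String readUserInfo(DataInput in, int journalVersion) throws IOException {
        if (journalVersion >= VERSION_93) {
            return in.readUTF();  // new images carry the user identity
        }
        return "";                // old images: fall back to an empty identity
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        new DataOutputStream(buf).writeUTF("root@%");
        DataInput newImage = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        System.out.println(readUserInfo(newImage, 93)); // root@%
        DataInput oldImage = new DataInputStream(new ByteArrayInputStream(new byte[0]));
        System.out.println(readUserInfo(oldImage, 92)); // "" (field absent in old image)
    }
}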