diff --git a/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java b/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java
index 0bdf85384d4c98..0008f16ab10101 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/catalog/MysqlColType.java
@@ -22,52 +22,65 @@
// which is also demonstrated in
// http://dev.mysql.com/doc/internals/en/com-query-response.html
public enum MysqlColType {
- MYSQL_TYPE_DECIMAL(0, "DECIMAL"),
- MYSQL_TYPE_TINY(1, "TINY INT"),
- MYSQL_TYPE_SHORT(2, "SMALL INT"),
- MYSQL_TYPE_LONG(3, "INT"),
- MYSQL_TYPE_FLOAT(4, "FLOAT"),
- MYSQL_TYPE_DOUBLE(5, "DOUBLE"),
- MYSQL_TYPE_NULL(6, "NULL"),
- MYSQL_TYPE_TIMESTAMP(7, "TIMESTAMP"),
- MYSQL_TYPE_LONGLONG(8, "LONGLONG"),
- MYSQL_TYPE_INT24(9, "INT24"),
- MYSQL_TYPE_DATE(10, "DATE"),
- MYSQL_TYPE_TIME(11, "TIME"),
- MYSQL_TYPE_DATETIME(12, "DATETIME"),
- MYSQL_TYPE_YEAR(13, "YEAR"),
- MYSQL_TYPE_NEWDATE(14, "NEWDATE"),
- MYSQL_TYPE_VARCHAR(15, "VARCHAR"),
- MYSQL_TYPE_BIT(16, "BIT"),
- MYSQL_TYPE_TIMESTAMP2(17, "TIMESTAMP2"),
- MYSQL_TYPE_DATETIME2(18, "DATETIME2"),
- MYSQL_TYPE_TIME2(19, "TIME2"),
- MYSQL_TYPE_JSON(245, "JSON"),
- MYSQL_TYPE_NEWDECIMAL(246, "NEW DECIMAL"),
- MYSQL_TYPE_ENUM(247, "ENUM"),
- MYSQL_TYPE_SET(248, "SET"),
- MYSQL_TYPE_TINY_BLOB(249, "TINY BLOB"),
- MYSQL_TYPE_MEDIUM_BLOB(250, "MEDIUM BLOB"),
- MYSQL_TYPE_LONG_BLOB(251, "LONG BLOB"),
- MYSQL_TYPE_BLOB(252, "BLOB"),
- MYSQL_TYPE_VARSTRING(253, "VAR STRING"),
- MYSQL_TYPE_STRING(254, "STRING"),
- MYSQL_TYPE_GEOMETRY(255, "GEOMETRY"),
- MYSQL_TYPE_MAP(400, "MAP");
+ MYSQL_TYPE_DECIMAL(0, "DECIMAL", "DECIMAL"),
+ MYSQL_TYPE_TINY(1, "TINY INT", "TINYINT"),
+ MYSQL_TYPE_SHORT(2, "SMALL INT", "SMALLINT"),
+ MYSQL_TYPE_LONG(3, "INT", "INTEGER"),
+ MYSQL_TYPE_FLOAT(4, "FLOAT", "FLOAT"),
+ MYSQL_TYPE_DOUBLE(5, "DOUBLE", "DOUBLE"),
+ MYSQL_TYPE_NULL(6, "NULL", "NULL"),
+ MYSQL_TYPE_TIMESTAMP(7, "TIMESTAMP", "TIMESTAMP"),
+ MYSQL_TYPE_LONGLONG(8, "LONGLONG", "BIGINT"),
+ MYSQL_TYPE_INT24(9, "INT24", "INT24"),
+ MYSQL_TYPE_DATE(10, "DATE", "DATE"),
+ MYSQL_TYPE_TIME(11, "TIME", "TIME"),
+ MYSQL_TYPE_DATETIME(12, "DATETIME", "DATETIME"),
+ MYSQL_TYPE_YEAR(13, "YEAR", "YEAR"),
+ MYSQL_TYPE_NEWDATE(14, "NEWDATE", "NEWDATE"),
+ MYSQL_TYPE_VARCHAR(15, "VARCHAR", "VARCHAR"),
+ MYSQL_TYPE_BIT(16, "BIT", "BIT"),
+ MYSQL_TYPE_TIMESTAMP2(17, "TIMESTAMP2", "TIMESTAMP2"),
+ MYSQL_TYPE_DATETIME2(18, "DATETIME2", "DATETIME2"),
+ MYSQL_TYPE_TIME2(19, "TIME2", "TIME2"),
+ MYSQL_TYPE_JSON(245, "JSON", "JSON"),
+ MYSQL_TYPE_NEWDECIMAL(246, "NEW DECIMAL", "NEWDECIMAL"),
+ MYSQL_TYPE_ENUM(247, "ENUM", "CHAR"),
+ MYSQL_TYPE_SET(248, "SET", "CHAR"),
+ MYSQL_TYPE_TINY_BLOB(249, "TINY BLOB", "TINYBLOB"),
+ MYSQL_TYPE_MEDIUM_BLOB(250, "MEDIUM BLOB", "MEDIUMBLOB"),
+ MYSQL_TYPE_LONG_BLOB(251, "LONG BLOB", "LONGBLOB"),
+ MYSQL_TYPE_BLOB(252, "BLOB", "BLOB"),
+ MYSQL_TYPE_VARSTRING(253, "VAR STRING", "VARSTRING"),
+ MYSQL_TYPE_STRING(254, "STRING", "CHAR"),
+ MYSQL_TYPE_GEOMETRY(255, "GEOMETRY", "GEOMETRY"),
+ MYSQL_TYPE_MAP(400, "MAP", "MAP");
- private MysqlColType(int code, String desc) {
+ private MysqlColType(int code, String desc, String jdbcColumnTypeName) {
this.code = code;
this.desc = desc;
+ this.jdbcColumnTypeName = jdbcColumnTypeName;
}
// used in network
private int code;
+
+    // For debugging: string description of the MySQL column type code.
private String desc;
+    // Mapping from the MySQL type name to the JDBC column type name, refer to:
+    // https://dev.mysql.com/doc/connector-j/en/connector-j-reference-type-conversions.html
+    // In plsql/Var.defineType(), the plsql Var type is looked up by this MySQL type name string.
+    // TODO: support a direct correspondence between Doris types and plsql Var.
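+    // Illustrative example: MYSQL_TYPE_LONGLONG.getJdbcColumnTypeName() returns "BIGINT";
+    // Var.defineType() is assumed to resolve the plsql Var type from that type-name string,
+    // as described above.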
+ private final String jdbcColumnTypeName;
+
public int getCode() {
return code;
}
+ public String getJdbcColumnTypeName() {
+ return jdbcColumnTypeName;
+ }
+
@Override
public String toString() {
return desc;
diff --git a/fe/fe-core/pom.xml b/fe/fe-core/pom.xml
index 29846ddf08fa6a..81a0a5e3089851 100644
--- a/fe/fe-core/pom.xml
+++ b/fe/fe-core/pom.xml
@@ -887,7 +887,12 @@ under the License.
                <visitor>true</visitor>
                <sourceDirectory>src/main/antlr4</sourceDirectory>
-                <treatWarningsAsErrors>true</treatWarningsAsErrors>
+                <!-- Importing DorisParser in PLParser.g4 prompts the warning
+                "options { tokenVocab = DorisLexer; } ignored"; this is expected behavior
+                (antlr/antlr4#2209). Ideally only this warning would be suppressed, but that
+                seems possible only by removing treatWarningsAsErrors.
+                Other warnings should not be ignored. -->
+
+                <libDirectory>src/main/antlr4/org/apache/doris/nereids</libDirectory>
diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
index 3943a30ee2a6a1..2e09068aa925a8 100644
--- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
+++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4
@@ -34,6 +34,12 @@ singleStatement
;
statement
+ : statementBase # statementBaseAlias
+ | CALL name=multipartIdentifier LEFT_PAREN (expression (COMMA expression)*)? RIGHT_PAREN #callProcedure
+ | (ALTER | CREATE (OR REPLACE)? | REPLACE) (PROCEDURE | PROC) name=multipartIdentifier LEFT_PAREN .*? RIGHT_PAREN .*? #createProcedure
+ ;
+
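+// Illustrative examples (comments only, not grammar): "CALL db1.proc1(1, 'a')" matches #callProcedure,
+// while "CREATE OR REPLACE PROCEDURE p1() BEGIN ... END" matches #createProcedure; for the latter the
+// original text is captured via getOriginSql() and handed to the plsql machinery.
+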
+statementBase
: explain? query outFileClause? #statementDefault
| CREATE ROW POLICY (IF NOT EXISTS)? name=identifier
ON table=multipartIdentifier
@@ -103,7 +109,6 @@ statement
| ALTER TABLE table=multipartIdentifier
DROP CONSTRAINT constraintName=errorCapturingIdentifier #dropConstraint
| SHOW CONSTRAINTS FROM table=multipartIdentifier #showConstraint
- | CALL functionName=identifier LEFT_PAREN (expression (COMMA expression)*)? RIGHT_PAREN #callProcedure
;
constraint
diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/PLLexer.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/PLLexer.g4
new file mode 100644
index 00000000000000..b3515555d0ebe7
--- /dev/null
+++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/PLLexer.g4
@@ -0,0 +1,216 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// https://github.com/apache/hive/blob/master/hplsql/src/main/antlr4/org/apache/hive/hplsql/HPlsql.g4
+// and modified by Doris
+
+lexer grammar PLLexer;
+
+// Notice: if parsing fails and the exception is a lexer token mismatch,
+// check whether there is a token conflict between PLLexer and DorisLexer,
+// or whether the PLLexer.tokens file is out of date.
+import DorisLexer;
+
+// Lexer rules
+ACTION: 'ACTION';
+ALLOCATE: 'ALLOCATE';
+ANSI_NULLS: 'ANSI_NULLS';
+ANSI_PADDING: 'ANSI_PADDING';
+ASSOCIATE: 'ASSOCIATE';
+AVG: 'AVG';
+BATCHSIZE: 'BATCHSIZE';
+BINARY_DOUBLE: 'BINARY_DOUBLE';
+BINARY_FLOAT: 'BINARY_FLOAT';
+BINARY_INTEGER: 'BINARY_INTEGER';
+BIT: 'BIT';
+BODY: 'BODY';
+BREAK: 'BREAK';
+BULK: 'BULK';
+BYTE: 'BYTE';
+CALLER: 'CALLER';
+CASCADE: 'CASCADE';
+CASESPECIFIC: 'CASESPECIFIC';
+CLIENT: 'CLIENT';
+CLOSE: 'CLOSE';
+CLUSTERED: 'CLUSTERED';
+CMP: 'CMP';
+COLLECT: 'COLLECT';
+COLLECTION: 'COLLECTION';
+COMPRESS: 'COMPRESS';
+CONCAT: 'CONCAT';
+CONDITION: 'CONDITION';
+CONSTANT: 'CONSTANT';
+CONTINUE: 'CONTINUE';
+COUNT_BIG: 'COUNT_BIG';
+CREATOR: 'CREATOR';
+CS: 'CS';
+CURRENT_SCHEMA: 'CURRENT_SCHEMA';
+CURSOR: 'CURSOR';
+DAYS: 'DAYS';
+DEC: 'DEC';
+DECLARE: 'DECLARE';
+DEFINED: 'DEFINED';
+DEFINER: 'DEFINER';
+DEFINITION: 'DEFINITION';
+DELIMITED: 'DELIMITED';
+DELIMITER: 'DELIMITER';
+DIAGNOSTICS: 'DIAGNOSTICS';
+DIR: 'DIR';
+DIRECTORY: 'DIRECTORY';
+DISTRIBUTE: 'DISTRIBUTE';
+ELSEIF: 'ELSEIF';
+ELSIF: 'ELSIF';
+ESCAPED: 'ESCAPED';
+EXEC: 'EXEC';
+EXCEPTION: 'EXCEPTION';
+EXCLUSIVE: 'EXCLUSIVE';
+EXIT: 'EXIT';
+FALLBACK: 'FALLBACK';
+FETCH: 'FETCH';
+FILES: 'FILES';
+FOUND: 'FOUND';
+GET: 'GET';
+GO: 'GO';
+HANDLER: 'HANDLER';
+HOST: 'HOST';
+IDENTITY: 'IDENTITY';
+INCLUDE: 'INCLUDE';
+INITRANS: 'INITRANS';
+INOUT: 'INOUT';
+INT2: 'INT2';
+INT4: 'INT4';
+INT8: 'INT8';
+INVOKER: 'INVOKER';
+ISOPEN: 'ISOPEN';
+ITEMS: 'ITEMS';
+KEEP: 'KEEP';
+LANGUAGE: 'LANGUAGE';
+LEAVE: 'LEAVE';
+LOCATOR: 'LOCATOR';
+LOCATORS: 'LOCATORS';
+LOCKS: 'LOCKS';
+LOG: 'LOG';
+LOGGED: 'LOGGED';
+LOGGING: 'LOGGING';
+LOOP: 'LOOP';
+MATCHED: 'MATCHED';
+MAXTRANS: 'MAXTRANS';
+MESSAGE_TEXT: 'MESSAGE_TEXT';
+MICROSECOND: 'MICROSECOND';
+MICROSECONDS: 'MICROSECONDS';
+MULTISET: 'MULTISET';
+NCHAR: 'NCHAR';
+NEW: 'NEW';
+NVARCHAR: 'NVARCHAR';
+NOCOUNT: 'NOCOUNT';
+NOCOMPRESS: 'NOCOMPRESS';
+NOLOGGING: 'NOLOGGING';
+NONE: 'NONE';
+NOTFOUND: 'NOTFOUND';
+NUMERIC: 'NUMERIC';
+NUMBER: 'NUMBER';
+OBJECT: 'OBJECT';
+OFF: 'OFF';
+OUT: 'OUT';
+OWNER: 'OWNER';
+PACKAGE: 'PACKAGE';
+PCTFREE: 'PCTFREE';
+PCTUSED: 'PCTUSED';
+PLS_INTEGER: 'PLS_INTEGER';
+PRECISION: 'PRECISION';
+PRESERVE: 'PRESERVE';
+PRINT: 'PRINT';
+QUALIFY: 'QUALIFY';
+QUERY_BAND: 'QUERY_BAND';
+QUIT: 'QUIT';
+QUOTED_IDENTIFIER: 'QUOTED_IDENTIFIER';
+RAISE: 'RAISE';
+RESIGNAL: 'RESIGNAL';
+RESTRICT: 'RESTRICT';
+RESULT: 'RESULT';
+RESULT_SET_LOCATOR: 'RESULT_SET_LOCATOR';
+RETURN: 'RETURN';
+REVERSE: 'REVERSE';
+ROWTYPE: 'ROWTYPE';
+ROW_COUNT: 'ROW_COUNT';
+RR: 'RR';
+RS: 'RS';
+PWD: 'PWD';
+SECONDS: 'SECONDS';
+SECURITY: 'SECURITY';
+SEGMENT: 'SEGMENT';
+SEL: 'SEL';
+SESSIONS: 'SESSIONS';
+SHARE: 'SHARE';
+SIGNAL: 'SIGNAL';
+SIMPLE_DOUBLE: 'SIMPLE_DOUBLE';
+SIMPLE_FLOAT: 'SIMPLE_FLOAT';
+SIMPLE_INTEGER: 'SIMPLE_INTEGER';
+SMALLDATETIME: 'SMALLDATETIME';
+SQL: 'SQL';
+SQLEXCEPTION: 'SQLEXCEPTION';
+SQLINSERT: 'SQLINSERT';
+SQLSTATE: 'SQLSTATE';
+SQLWARNING: 'SQLWARNING';
+STATISTICS: 'STATISTICS';
+STEP: 'STEP';
+STORED: 'STORED';
+SUBDIR: 'SUBDIR';
+SUBSTRING: 'SUBSTRING';
+SUMMARY: 'SUMMARY';
+SYS_REFCURSOR: 'SYS_REFCURSOR';
+TABLESPACE: 'TABLESPACE';
+TEXTIMAGE_ON: 'TEXTIMAGE_ON';
+TITLE: 'TITLE';
+TOP: 'TOP';
+UR: 'UR';
+VAR: 'VAR';
+VARCHAR2: 'VARCHAR2';
+VARYING: 'VARYING';
+VOLATILE: 'VOLATILE';
+WHILE: 'WHILE';
+WITHOUT: 'WITHOUT';
+XACT_ABORT: 'XACT_ABORT';
+XML: 'XML';
+YES: 'YES';
+
+// Functions with specific syntax
+ACTIVITY_COUNT: 'ACTIVITY_COUNT';
+CUME_DIST: 'CUME_DIST';
+DENSE_RANK: 'DENSE_RANK';
+FIRST_VALUE: 'FIRST_VALUE';
+LAG: 'LAG';
+LAST_VALUE: 'LAST_VALUE';
+LEAD: 'LEAD';
+MAX_PART_STRING: 'MAX_PART_STRING';
+MIN_PART_STRING: 'MIN_PART_STRING';
+MAX_PART_INT: 'MAX_PART_INT';
+MIN_PART_INT: 'MIN_PART_INT';
+MAX_PART_DATE: 'MAX_PART_DATE';
+MIN_PART_DATE: 'MIN_PART_DATE';
+PART_COUNT: 'PART_COUNT';
+PART_LOC: 'PART_LOC';
+RANK: 'RANK';
+ROW_NUMBER: 'ROW_NUMBER';
+STDEV: 'STDEV';
+SYSDATE: 'SYSDATE';
+VARIANCE: 'VARIANCE';
+
+DOT2: '..';
+
+LABEL_PL
+ : ([a-zA-Z] | DIGIT | '_')* ':'
+ ;
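+
+// Illustrative example: a prefix such as "calc_loop:" is tokenized as LABEL_PL, while the
+// "<<calc_loop>>" form is handled by the label_stmt rule in PLParser.g4.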
diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/PLParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/PLParser.g4
new file mode 100644
index 00000000000000..e2975f65d00d60
--- /dev/null
+++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/PLParser.g4
@@ -0,0 +1,938 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/antlr4/org/apache/hive/hplsql/HPlsql.g4
+// and modified by Doris
+
+// PL/SQL Procedural SQL Extension Grammar
+parser grammar PLParser;
+
+options { tokenVocab = PLLexer; }
+
+import DorisParser;
+
+program : block EOF;
+
+block : ((begin_end_block | stmt) GO?)+ ; // Multiple consecutive blocks/statements
+
+begin_end_block :
+ declare_block? BEGIN block exception_block? block_end
+ ;
+
+single_block_stmt : // Single BEGIN END block (but nested blocks are possible) or single statement
+ BEGIN block exception_block? block_end
+ | stmt SEMICOLON?
+ ;
+
+block_end :
+ {!_input.LT(2).getText().equalsIgnoreCase("TRANSACTION")}? END
+ ;
+
+procedure_block :
+ begin_end_block
+ | stmt+ GO?
+ ;
+
+// Differences from http://mail.hplsql.org/doc
+// 1. plsql statements already covered by the Doris statement rule (statementBase):
+// alter_table_stmt
+// create_database_stmt
+// create_index_stmt
+// create_table_stmt
+// delete_stmt
+// describe_stmt
+// insert_stmt
+// insert_directory_stmt
+// grant_stmt
+// select_stmt
+// update_stmt
+// use_stmt
+// truncate_stmt
+//
+// 2. TODO: statements Doris should additionally support:
+// begin_transaction_stmt
+// end_transaction_stmt
+// commit_stmt
+// rollback_stmt
+//
+// 3. TODO: plsql statements still to be added here:
+// cmp_stmt
+// copy_from_local_stmt
+// copy_stmt
+// create_local_temp_table_stmt
+// merge_stmt
+//
+// 4. hplsql statements removed here:
+// collect_stats_stmt
+// summary_stmt
+// create_table_type_stmt
+// hive
+doris_statement :
+ statementBase
+ ;
+
+stmt :
+ doris_statement
+ | assignment_stmt
+ | allocate_cursor_stmt
+ | associate_locator_stmt
+ | break_stmt
+ | call_stmt
+ | close_stmt
+ | create_function_stmt // like a simple procedure
+ | create_package_stmt
+ | create_package_body_stmt
+ | create_procedure_stmt
+ | declare_stmt
+ | exec_stmt
+ | exit_stmt
+ | fetch_stmt
+ | for_cursor_stmt
+ | for_range_stmt
+ | if_stmt
+ | include_stmt
+ | get_diag_stmt
+ | leave_stmt
+ | map_object_stmt
+ | open_stmt
+ | print_stmt
+ | quit_stmt
+ | raise_stmt
+ | resignal_stmt
+ | return_stmt
+ | signal_stmt
+ | values_into_stmt
+ | while_stmt
+ | unconditional_loop_stmt
+ | label_stmt
+ | host_pl
+ | null_stmt
+ | expr_stmt
+ | semicolon_stmt // Placed here to allow null statements ;;...
+ ;
+
+semicolon_stmt :
+ SEMICOLON
+ | '@' | '/' // '#' |
+ ;
+
+exception_block : // Exception block
+ EXCEPTION exception_block_item+
+ ;
+
+exception_block_item :
+ WHEN IDENTIFIER THEN block ~(WHEN | END)
+ ;
+
+null_stmt : // NULL statement (no operation)
+ NULL
+ ;
+
+expr_stmt : // Standalone expression
+ {!_input.LT(1).getText().equalsIgnoreCase("GO")}? expr
+ ;
+
+assignment_stmt : // Assignment statement
+ SET set_session_option
+ | SET? assignment_stmt_item (COMMA assignment_stmt_item)*
+ ;
+
+assignment_stmt_item :
+ assignment_stmt_single_item
+ | assignment_stmt_multiple_item
+ | assignment_stmt_select_item
+ | assignment_stmt_collection_item
+ ;
+
+assignment_stmt_single_item :
+ ident_pl COLON? EQ expr
+ | LEFT_PAREN ident_pl RIGHT_PAREN COLON? EQ expr
+ ;
+
+assignment_stmt_collection_item :
+ expr_func COLON EQ expr
+ ;
+
+assignment_stmt_multiple_item :
+ LEFT_PAREN ident_pl (COMMA ident_pl)* RIGHT_PAREN COLON? EQ LEFT_PAREN expr (COMMA expr)* RIGHT_PAREN
+ ;
+
+assignment_stmt_select_item :
+ (ident_pl | (LEFT_PAREN ident_pl (COMMA ident_pl)* RIGHT_PAREN)) COLON? EQ LEFT_PAREN query RIGHT_PAREN
+ ;
+
+allocate_cursor_stmt:
+ ALLOCATE ident_pl CURSOR FOR ((RESULT SET) | PROCEDURE) ident_pl
+ ;
+
+associate_locator_stmt :
+ ASSOCIATE (RESULT SET)? (LOCATOR | LOCATORS) LEFT_PAREN ident_pl (COMMA ident_pl)* RIGHT_PAREN WITH PROCEDURE ident_pl
+ ;
+
+break_stmt :
+ BREAK
+ ;
+
+call_stmt :
+ CALL (expr_dot | expr_func | multipartIdentifier)
+ ;
+
+declare_stmt : // Declaration statement
+ DECLARE declare_stmt_item (COMMA declare_stmt_item)*
+ ;
+
+declare_block : // Declaration block
+ DECLARE declare_stmt_item SEMICOLON (declare_stmt_item SEMICOLON)*
+ ;
+
+declare_block_inplace :
+ declare_stmt_item SEMICOLON (declare_stmt_item SEMICOLON)*
+ ;
+
+declare_stmt_item :
+ declare_cursor_item
+ | declare_condition_item
+ | declare_handler_item
+ | declare_var_item
+ ; // TODO declare_temporary_table_item
+
+declare_var_item :
+ ident_pl (COMMA ident_pl)* AS? dtype dtype_len? dtype_attr* dtype_default?
+ | ident_pl CONSTANT AS? dtype dtype_len? dtype_default
+ ;
+
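+// Illustrative example: "DECLARE v_cnt INT DEFAULT 0, v_name VARCHAR(32)" is parsed as a
+// declare_stmt with two declare_var_item entries; "DEFAULT 0" uses the dtype_default rule.
+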
+declare_condition_item : // Condition declaration
+ ident_pl CONDITION
+ ;
+
+declare_cursor_item : // Cursor declaration
+ (CURSOR ident_pl | ident_pl CURSOR) (cursor_with_return | cursor_without_return)? (IS | AS | FOR) (query | expr )
+ ;
+
+cursor_with_return :
+ WITH RETURN ONLY? (TO (CALLER | CLIENT))?
+ ;
+
+cursor_without_return :
+ WITHOUT RETURN
+ ;
+
+declare_handler_item : // Condition handler declaration
+ (CONTINUE | EXIT) HANDLER FOR (SQLEXCEPTION | SQLWARNING | NOT FOUND | ident_pl) single_block_stmt
+ ;
+
+dtype : // Data types
+ CHAR
+ | BIGINT
+ | BINARY_DOUBLE
+ | BINARY_FLOAT
+ | BINARY_INTEGER
+ | BIT
+ | DATE
+ | DATETIME
+ | DEC
+ | DECIMAL
+ | DOUBLE PRECISION?
+ | FLOAT
+ | INT
+ | INT2
+ | INT4
+ | INT8
+ | INTEGER
+ | NCHAR
+ | NVARCHAR
+ | NUMBER
+ | NUMERIC
+ | PLS_INTEGER
+ | REAL
+ | RESULT_SET_LOCATOR VARYING
+ | SIMPLE_FLOAT
+ | SIMPLE_DOUBLE
+ | SIMPLE_INTEGER
+ | SMALLINT
+ | SMALLDATETIME
+ | STRING
+ | SYS_REFCURSOR
+ | TIMESTAMP
+ | TINYINT
+ | VARCHAR
+ | VARCHAR2
+ | XML
+ | qident ('%' (TYPE | ROWTYPE))? // User-defined or derived data type
+ ;
+
+dtype_len : // Data type length or size specification
+ LEFT_PAREN (INTEGER_VALUE | MAX) (CHAR | BYTE)? (COMMA INTEGER_VALUE)? RIGHT_PAREN
+ ;
+
+dtype_attr :
+ NOT? NULL
+ | CHAR SET ident_pl
+ | NOT? (CASESPECIFIC | CS)
+ ;
+
+dtype_default :
+ COLON? EQ expr
+ | WITH? DEFAULT expr?
+ ;
+
+create_function_stmt :
+ (ALTER | CREATE (OR REPLACE)? | REPLACE) FUNCTION multipartIdentifier create_routine_params? create_function_return (AS | IS)? declare_block_inplace? single_block_stmt
+ ;
+
+create_function_return :
+ (RETURN | RETURNS) dtype dtype_len?
+ ;
+
+create_package_stmt :
+ (ALTER | CREATE (OR REPLACE)? | REPLACE) PACKAGE multipartIdentifier (AS | IS) package_spec END (ident_pl SEMICOLON)?
+ ;
+
+package_spec :
+ package_spec_item SEMICOLON (package_spec_item SEMICOLON)*
+ ;
+
+package_spec_item :
+ declare_stmt_item
+ | FUNCTION ident_pl create_routine_params? create_function_return
+ | (PROCEDURE | PROC) ident_pl create_routine_params?
+ ;
+
+create_package_body_stmt :
+ (ALTER | CREATE (OR REPLACE)? | REPLACE) PACKAGE BODY multipartIdentifier (AS | IS) package_body END (ident_pl SEMICOLON)?
+ ;
+
+package_body :
+ package_body_item SEMICOLON (package_body_item SEMICOLON)*
+ ;
+
+package_body_item :
+ declare_stmt_item
+ | create_function_stmt
+ | create_procedure_stmt
+ ;
+
+create_procedure_stmt :
+ (ALTER | CREATE (OR REPLACE)? | REPLACE) (PROCEDURE | PROC) multipartIdentifier create_routine_params? create_routine_options? (AS | IS)? declare_block_inplace? label_stmt? procedure_block (ident_pl SEMICOLON)?
+ ;
+
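+// Illustrative example accepted by create_procedure_stmt:
+//   CREATE OR REPLACE PROCEDURE p_add(IN a INT, IN b INT, OUT c INT)
+//   BEGIN
+//     c := a + b;
+//   END;
+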
+create_routine_params :
+ LEFT_PAREN RIGHT_PAREN
+ | LEFT_PAREN create_routine_param_item (COMMA create_routine_param_item)* RIGHT_PAREN
+ | {!_input.LT(1).getText().equalsIgnoreCase("IS") &&
+ !_input.LT(1).getText().equalsIgnoreCase("AS") &&
+ !(_input.LT(1).getText().equalsIgnoreCase("DYNAMIC") && _input.LT(2).getText().equalsIgnoreCase("RESULT"))
+ }?
+ create_routine_param_item (COMMA create_routine_param_item)*
+ ;
+
+create_routine_param_item :
+ (IN | OUT | INOUT | IN OUT)? ident_pl dtype dtype_len? dtype_attr* dtype_default?
+ | ident_pl (IN | OUT | INOUT | IN OUT)? dtype dtype_len? dtype_attr* dtype_default?
+ ;
+
+create_routine_options :
+ create_routine_option+
+ ;
+create_routine_option :
+ LANGUAGE SQL
+ | SQL SECURITY (CREATOR | DEFINER | INVOKER | OWNER)
+ | DYNAMIC? RESULT SETS INTEGER_VALUE
+ ;
+
+exec_stmt : // EXEC, EXECUTE IMMEDIATE statement
+ (EXEC | EXECUTE) IMMEDIATE? expr (LEFT_PAREN expr_func_params RIGHT_PAREN | expr_func_params)? (INTO IDENTIFIER (COMMA IDENTIFIER)*)? using_clause?
+ ;
+
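+// Illustrative example: "EXECUTE IMMEDIATE 'SELECT count(*) FROM t1' INTO v_cnt" is parsed here;
+// the INTO identifier names the variable that receives the result (HPL/SQL-style dynamic SQL).
+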
+if_stmt : // IF statement
+ if_plsql_stmt
+ | if_tsql_stmt
+ | if_bteq_stmt
+ ;
+
+if_plsql_stmt :
+ IF bool_expr THEN block elseif_block* else_block? END IF
+ ;
+
+if_tsql_stmt :
+ IF bool_expr single_block_stmt (ELSE single_block_stmt)?
+ ;
+
+if_bteq_stmt :
+ '.' IF bool_expr THEN single_block_stmt
+ ;
+
+elseif_block :
+ (ELSIF | ELSEIF) bool_expr THEN block
+ ;
+
+else_block :
+ ELSE block
+ ;
+
+include_stmt : // INCLUDE statement
+ INCLUDE (file_name | expr)
+ ;
+
+exit_stmt :
+ EXIT IDENTIFIER? (WHEN bool_expr)?
+ ;
+
+get_diag_stmt : // GET DIAGNOSTICS statement
+ GET DIAGNOSTICS get_diag_stmt_item
+ ;
+
+get_diag_stmt_item :
+ get_diag_stmt_exception_item
+ | get_diag_stmt_rowcount_item
+ ;
+
+get_diag_stmt_exception_item :
+ EXCEPTION INTEGER_VALUE qident EQ MESSAGE_TEXT
+ ;
+
+get_diag_stmt_rowcount_item :
+ qident EQ ROW_COUNT
+ ;
+
+leave_stmt :
+ LEAVE IDENTIFIER?
+ ;
+
+map_object_stmt :
+ MAP OBJECT ident_pl (TO ident_pl)? (AT ident_pl)?
+ ;
+
+open_stmt : // OPEN cursor statement
+ OPEN ident_pl (FOR (query | expr))?
+ ;
+
+fetch_stmt : // FETCH cursor statement
+ FETCH FROM? ident_pl bulk_collect_clause? INTO ident_pl (COMMA ident_pl)* fetch_limit?
+ ;
+
+fetch_limit:
+ LIMIT expr
+ ;
+
+close_stmt : // CLOSE cursor statement
+ CLOSE IDENTIFIER
+ ;
+
+print_stmt : // PRINT statement
+ PRINT expr
+ | PRINT LEFT_PAREN expr RIGHT_PAREN
+ ;
+
+quit_stmt :
+ '.'? QUIT expr?
+ ;
+
+raise_stmt :
+ RAISE
+ ;
+
+resignal_stmt : // RESIGNAL statement
+ RESIGNAL (SQLSTATE VALUE? expr (SET MESSAGE_TEXT EQ expr)? )?
+ ;
+
+return_stmt : // RETURN statement
+ RETURN expr?
+ ;
+
+// Plsql allows setting local variables with SET, which conflicts with the Doris SET session variable syntax.
+// The first alternative is matched first, so every SET is first tried as a session variable assignment.
+set_session_option :
+ set_doris_session_option
+ | set_current_schema_option
+ | set_mssql_session_option
+ | set_teradata_session_option
+ ;
+
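+// Illustrative examples: "SET sql_dialect = doris" (ident_pl on both sides) matches
+// set_doris_session_option first; "SET v_total = 100" cannot (100 is not an ident_pl), so it
+// falls through to assignment_stmt_item and becomes a variable assignment.
+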
+set_doris_session_option :
+ (GLOBAL | LOCAL | SESSION)? ident_pl EQ ident_pl
+ ;
+
+set_current_schema_option :
+ ((CURRENT? SCHEMA) | CURRENT_SCHEMA) EQ? expr
+ ;
+
+set_mssql_session_option :
+ ( ANSI_NULLS
+ | ANSI_PADDING
+ | NOCOUNT
+ | QUOTED_IDENTIFIER
+ | XACT_ABORT )
+ (ON | OFF)
+ ;
+
+set_teradata_session_option :
+ QUERY_BAND EQ (expr | NONE) UPDATE? FOR (TRANSACTION | SESSION)
+ ;
+
+signal_stmt : // SIGNAL statement
+ SIGNAL ident_pl
+ ;
+
+values_into_stmt : // VALUES INTO statement
+ VALUES LEFT_PAREN? expr (COMMA expr)* RIGHT_PAREN? INTO LEFT_PAREN? ident_pl (COMMA ident_pl)* RIGHT_PAREN?
+ ;
+
+while_stmt : // WHILE loop statement
+ WHILE bool_expr (DO | LOOP | THEN | BEGIN) block END (WHILE | LOOP)?
+ ;
+
+unconditional_loop_stmt : // LOOP .. END LOOP
+ LOOP block END LOOP
+ ;
+
+for_cursor_stmt : // FOR (cursor) statement
+ FOR IDENTIFIER IN LEFT_PAREN? query RIGHT_PAREN? LOOP block END LOOP
+ ;
+
+for_range_stmt : // FOR (Integer range) statement
+ FOR IDENTIFIER IN REVERSE? expr DOT2 expr ((BY | STEP) expr)? LOOP block END LOOP
+ ;
+
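+// Illustrative example: "FOR i IN 1..10 LOOP ... END LOOP", or with a step,
+// "FOR i IN REVERSE 10..1 BY 2 LOOP ... END LOOP" (DOT2 is the '..' token).
+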
+label_stmt :
+ LABEL_PL
+ | LT LT IDENTIFIER GT GT
+ ;
+
+using_clause : // USING var,... clause
+ USING expr (COMMA expr)*
+ ;
+
+bulk_collect_clause :
+ BULK COLLECT
+ ;
+
+bool_expr : // Boolean condition
+ NOT? LEFT_PAREN bool_expr RIGHT_PAREN
+ | bool_expr bool_expr_logical_operator bool_expr
+ | bool_expr_atom
+ ;
+
+bool_expr_atom :
+ bool_expr_unary
+ | bool_expr_binary
+ | expr
+ ;
+
+bool_expr_unary :
+ expr IS NOT? NULL
+ | expr BETWEEN expr AND expr
+ ; // TODO NOT? EXISTS LEFT_PAREN query RIGHT_PAREN, bool_expr_single_in, bool_expr_multi_in
+
+bool_expr_binary :
+ expr bool_expr_binary_operator expr
+ ;
+
+bool_expr_logical_operator :
+ AND
+ | OR
+ ;
+
+bool_expr_binary_operator :
+ EQ
+ | NEQ
+ | LT
+ | LTE
+ | GT
+ | GTE
+ | NOT? (LIKE | RLIKE | REGEXP)
+ ;
+
+expr :
+ expr interval_item
+ | expr (ASTERISK | SLASH) expr
+ | expr (PLUS | SUBTRACT) expr
+ | LEFT_PAREN query RIGHT_PAREN
+ | LEFT_PAREN expr RIGHT_PAREN
+ | expr_interval
+ | expr_concat
+ | expr_dot
+ | expr_case
+ | expr_cursor_attribute
+ | expr_agg_window_func
+ | expr_spec_func
+ | expr_func
+ | expr_atom
+ ;
+
+expr_atom :
+ date_literal
+ | timestamp_literal
+ | bool_literal
+ | qident
+ | string
+ | dec_number
+ | int_number
+ | null_const
+ ;
+
+expr_interval :
+ INTERVAL expr interval_item
+ ;
+interval_item :
+ DAY
+ | DAYS
+ | MICROSECOND
+ | MICROSECONDS
+ | SECOND
+ | SECONDS
+ ;
+
+expr_concat : // String concatenation operator
+ expr_concat_item (DOUBLEPIPES | CONCAT) expr_concat_item ((DOUBLEPIPES | CONCAT) expr_concat_item)*
+ ;
+
+expr_concat_item :
+ LEFT_PAREN expr RIGHT_PAREN
+ | expr_case
+ | expr_agg_window_func
+ | expr_spec_func
+ | expr_dot
+ | expr_func
+ | expr_atom
+ ;
+
+expr_case : // CASE expression
+ expr_case_simple
+ | expr_case_searched
+ ;
+
+expr_case_simple :
+ CASE expr (WHEN expr THEN expr)+ (ELSE expr)? END
+ ;
+
+expr_case_searched :
+ CASE (WHEN bool_expr THEN expr)+ (ELSE expr)? END
+ ;
+
+expr_cursor_attribute :
+ ident_pl '%' (ISOPEN | FOUND | NOTFOUND)
+ ;
+
+expr_agg_window_func :
+ AVG LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ | COUNT LEFT_PAREN ((expr_func_all_distinct? expr) | '*') RIGHT_PAREN expr_func_over_clause?
+ | COUNT_BIG LEFT_PAREN ((expr_func_all_distinct? expr) | '*') RIGHT_PAREN expr_func_over_clause?
+ | CUME_DIST LEFT_PAREN RIGHT_PAREN expr_func_over_clause
+ | DENSE_RANK LEFT_PAREN RIGHT_PAREN expr_func_over_clause
+ | FIRST_VALUE LEFT_PAREN expr RIGHT_PAREN expr_func_over_clause
+ | LAG LEFT_PAREN expr (COMMA expr (COMMA expr)?)? RIGHT_PAREN expr_func_over_clause
+ | LAST_VALUE LEFT_PAREN expr RIGHT_PAREN expr_func_over_clause
+ | LEAD LEFT_PAREN expr (COMMA expr (COMMA expr)?)? RIGHT_PAREN expr_func_over_clause
+ | MAX LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ | MIN LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ | RANK LEFT_PAREN RIGHT_PAREN expr_func_over_clause
+ | ROW_NUMBER LEFT_PAREN RIGHT_PAREN expr_func_over_clause
+ | STDEV LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ | SUM LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ | VAR LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ | VARIANCE LEFT_PAREN expr_func_all_distinct? expr RIGHT_PAREN expr_func_over_clause?
+ ;
+
+expr_func_all_distinct :
+ ALL
+ | DISTINCT
+ ;
+
+order_by_clause :
+ ORDER BY expr (ASC | DESC)? (COMMA expr (ASC | DESC)?)*
+ ;
+
+expr_func_over_clause :
+ OVER LEFT_PAREN expr_func_partition_by_clause? order_by_clause? RIGHT_PAREN
+ ;
+
+expr_func_partition_by_clause :
+ PARTITION BY expr (COMMA expr)*
+ ;
+
+expr_spec_func :
+ ACTIVITY_COUNT
+ | CAST LEFT_PAREN expr AS dtype dtype_len? RIGHT_PAREN
+ | COUNT LEFT_PAREN (expr | '*') RIGHT_PAREN
+ | CURRENT_DATE | CURRENT DATE
+ | (CURRENT_TIMESTAMP | CURRENT TIMESTAMP) (LEFT_PAREN expr RIGHT_PAREN)?
+ | CURRENT_USER | CURRENT USER
+ | MAX_PART_STRING LEFT_PAREN expr (COMMA expr (COMMA expr EQ expr)*)? RIGHT_PAREN
+ | MIN_PART_STRING LEFT_PAREN expr (COMMA expr (COMMA expr EQ expr)*)? RIGHT_PAREN
+ | MAX_PART_INT LEFT_PAREN expr (COMMA expr (COMMA expr EQ expr)*)? RIGHT_PAREN
+ | MIN_PART_INT LEFT_PAREN expr (COMMA expr (COMMA expr EQ expr)*)? RIGHT_PAREN
+ | MAX_PART_DATE LEFT_PAREN expr (COMMA expr (COMMA expr EQ expr)*)? RIGHT_PAREN
+ | MIN_PART_DATE LEFT_PAREN expr (COMMA expr (COMMA expr EQ expr)*)? RIGHT_PAREN
+ | PART_COUNT LEFT_PAREN expr (COMMA expr EQ expr)* RIGHT_PAREN
+ | PART_LOC LEFT_PAREN expr (COMMA expr EQ expr)+ (COMMA expr)? RIGHT_PAREN
+ | TRIM LEFT_PAREN expr RIGHT_PAREN
+ | SUBSTRING LEFT_PAREN expr FROM expr (FOR expr)? RIGHT_PAREN
+ | SYSDATE
+ | USER
+ ;
+
+expr_func :
+ multipartIdentifier LEFT_PAREN expr_func_params? RIGHT_PAREN
+ ;
+
+expr_dot :
+ expr_dot_method_call | expr_dot_property_access
+ ;
+
+expr_dot_method_call :
+ (ident_pl | expr_func) DOT expr_func
+ ;
+
+expr_dot_property_access :
+ (ident_pl | expr_func) DOT ident_pl
+ ;
+
+expr_func_params :
+ func_param (COMMA func_param)*
+ ;
+
+func_param :
+ {!_input.LT(1).getText().equalsIgnoreCase("INTO")}? (ident_pl EQ GT?)? expr
+ ;
+
+host_pl :
+ '!' host_cmd ';' // OS command
+ | host_stmt
+ ;
+
+host_cmd :
+ .*?
+ ;
+
+host_stmt :
+ HOST expr
+ ;
+
+file_name :
+ STRING_LITERAL | ('/' | '.' '/')? qident ('/' qident)*
+ ;
+
+date_literal : // DATE 'YYYY-MM-DD' literal
+ DATE string
+ ;
+
+timestamp_literal : // TIMESTAMP 'YYYY-MM-DD HH:MI:SS.FFF' literal
+ TIMESTAMP string
+ ;
+
+ident_pl :
+ '-'? (IDENTIFIER | non_reserved_words | nonReserved)
+ ;
+
+qident : // qualified identifier e.g: table_name.col_name or db_name._table_name
+ ident_pl ('.'ident_pl)*
+ ;
+
+string : // String literal (single or double quoted)
+ STRING_LITERAL
+ ;
+
+int_number : // Integer (positive or negative)
+ ('-' | '+')? INTEGER_VALUE
+ ;
+
+dec_number : // Decimal number (positive or negative)
+ ('-' | '+')? DECIMAL_VALUE
+ ;
+
+bool_literal : // Boolean literal
+ TRUE
+ | FALSE
+ ;
+
+null_const : // NULL constant
+ NULL
+ ;
+
+non_reserved_words : // Tokens that are not reserved words and can be used as identifiers
+ ACTION
+ | ACTIVITY_COUNT
+ | ALLOCATE
+ | ANSI_NULLS
+ | ANSI_PADDING
+ | ASSOCIATE
+ | AVG
+ | BATCHSIZE
+ | BINARY_DOUBLE
+ | BINARY_FLOAT
+ | BIT
+ | BODY
+ | BREAK
+ | BULK
+ | BYTE
+ | CALLER
+ | CASCADE
+ | CASESPECIFIC
+ | CLIENT
+ | CLOSE
+ | CLUSTERED
+ | CMP
+ | COLLECT
+ | COLLECTION
+ | COMPRESS
+ | CONSTANT
+ | CONCAT
+ | CONDITION
+ | COUNT_BIG
+ | CREATOR
+ | CS
+ | CUME_DIST
+ | CURRENT_DATE
+ | CURRENT_TIMESTAMP
+ | CURRENT_USER
+ | CURSOR
+ | DAYS
+ | DEC
+ | DECLARE
+ | DEFINED
+ | DEFINER
+ | DEFINITION
+ | DELIMITED
+ | DELIMITER
+ | DENSE_RANK
+ | DIAGNOSTICS
+ | DIR
+ | DIRECTORY
+ | DISTRIBUTE
+ | ESCAPED
+ | EXEC
+ | EXCEPTION
+ | EXCLUSIVE
+ | EXIT
+ | FALLBACK
+ | FETCH
+ | FILES
+ | FIRST_VALUE
+ | FOUND
+ | GET
+ | GO
+ | HANDLER
+ | HOST
+ | IDENTITY
+ | INCLUDE
+ | INITRANS
+ | INOUT
+ | INT2
+ | INT4
+ | INT8
+ | INVOKER
+ | ITEMS
+ | ISOPEN
+ | KEEP
+ | KEYS
+ | LAG
+ | LANGUAGE
+ | LAST_VALUE
+ | LEAD
+ | LEAVE
+ | LOCATOR
+ | LOCATORS
+ | LOCKS
+ | LOG
+ | LOGGED
+ | LOGGING
+ | LOOP
+ | MATCHED
+ | MAXTRANS
+ | MESSAGE_TEXT
+ | MICROSECOND
+ | MICROSECONDS
+ | MULTISET
+ | NCHAR
+ | NEW
+ | NVARCHAR
+ | NOCOMPRESS
+ | NOCOUNT
+ | NOLOGGING
+ | NONE
+ | NOTFOUND
+ | NUMERIC
+ | NUMBER
+ | OBJECT
+ | OFF
+ | OUT
+ | OWNER
+ | PACKAGE
+ | PART_COUNT
+ | PART_LOC
+ | PCTFREE
+ | PCTUSED
+ | PRECISION
+ | PRESERVE
+ | PRINT
+ | PWD
+ | QUALIFY
+ | QUERY_BAND
+ | QUIT
+ | QUOTED_IDENTIFIER
+ | RAISE
+ | RANK
+ | RR
+ | RESIGNAL
+ | RESTRICT
+ | RESULT
+ | RESULT_SET_LOCATOR
+ | RETURN
+ | REVERSE
+ | RS
+ | ROW_COUNT
+ | ROW_NUMBER
+ | SECONDS
+ | SECURITY
+ | SEGMENT
+ | SEL
+ | SESSIONS
+ | SHARE
+ | SIGNAL
+ | SIMPLE_DOUBLE
+ | SIMPLE_FLOAT
+ | SMALLDATETIME
+ | SQL
+ | SQLEXCEPTION
+ | SQLINSERT
+ | SQLSTATE
+ | SQLWARNING
+ | STATISTICS
+ | STEP
+ | STDEV
+ | STORED
+ | SUBDIR
+ | SUBSTRING
+ | SUMMARY
+ | SYSDATE
+ | SYS_REFCURSOR
+ | TABLESPACE
+ | TEXTIMAGE_ON
+ | TITLE
+ | TOP
+ | UR
+ | VAR
+ | VARCHAR2
+ | VARYING
+ | VARIANCE
+ | VOLATILE
+ | WHILE
+ | WITHOUT
+ | XML
+ | YES
+ ;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
index d74b45cbf27b70..c4ae1b005e0e39 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
@@ -221,6 +221,7 @@
import org.apache.doris.persist.meta.MetaReader;
import org.apache.doris.persist.meta.MetaWriter;
import org.apache.doris.planner.TabletLoadIndexRecorderMgr;
+import org.apache.doris.plsql.metastore.PlsqlManager;
import org.apache.doris.plugin.PluginInfo;
import org.apache.doris.plugin.PluginMgr;
import org.apache.doris.policy.PolicyMgr;
@@ -501,6 +502,8 @@ public class Env {
private StatisticsCleaner statisticsCleaner;
+ private PlsqlManager plsqlManager;
+
private BinlogManager binlogManager;
private BinlogGcer binlogGcer;
@@ -750,6 +753,7 @@ public Env(boolean isCheckpointCatalog) {
this.queryStats = new QueryStats();
this.loadManagerAdapter = new LoadManagerAdapter();
this.hiveTransactionMgr = new HiveTransactionMgr();
+ this.plsqlManager = new PlsqlManager();
this.binlogManager = new BinlogManager();
this.binlogGcer = new BinlogGcer();
this.columnIdFlusher = new ColumnIdFlushDaemon();
@@ -855,6 +859,10 @@ public MetastoreEventsProcessor getMetastoreEventsProcessor() {
return metastoreEventsProcessor;
}
+ public PlsqlManager getPlsqlManager() {
+ return plsqlManager;
+ }
+
// use this to get correct ClusterInfoService instance
public static SystemInfoService getCurrentSystemInfo() {
return getCurrentEnv().getClusterInfo();
@@ -2134,6 +2142,12 @@ public long loadWorkloadSchedPolicy(DataInputStream in, long checksum) throws IO
return checksum;
}
+ public long loadPlsqlProcedure(DataInputStream in, long checksum) throws IOException {
+ plsqlManager = PlsqlManager.read(in);
+ LOG.info("finished replay plsql procedure from image");
+ return checksum;
+ }
+
public long loadSmallFiles(DataInputStream in, long checksum) throws IOException {
smallFileMgr.readFields(in);
LOG.info("finished replay smallFiles from image");
@@ -2418,6 +2432,11 @@ public long saveWorkloadSchedPolicy(CountingDataOutputStream dos, long checksum)
return checksum;
}
+ public long savePlsqlProcedure(CountingDataOutputStream dos, long checksum) throws IOException {
+ Env.getCurrentEnv().getPlsqlManager().write(dos);
+ return checksum;
+ }
+
public long saveSmallFiles(CountingDataOutputStream dos, long checksum) throws IOException {
smallFileMgr.write(dos);
return checksum;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java b/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java
index d74c519407f303..e356bbaa6551aa 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/journal/JournalEntity.java
@@ -117,6 +117,9 @@
import org.apache.doris.persist.TableRenameColumnInfo;
import org.apache.doris.persist.TableStatsDeletionLog;
import org.apache.doris.persist.TruncateTableInfo;
+import org.apache.doris.plsql.metastore.PlsqlPackage;
+import org.apache.doris.plsql.metastore.PlsqlProcedureKey;
+import org.apache.doris.plsql.metastore.PlsqlStoredProcedure;
import org.apache.doris.plugin.PluginInfo;
import org.apache.doris.policy.DropPolicyLog;
import org.apache.doris.policy.Policy;
@@ -860,6 +863,26 @@ public void readFields(DataInput in) throws IOException {
isRead = true;
break;
}
+ case OperationType.OP_ADD_PLSQL_STORED_PROCEDURE: {
+ data = PlsqlStoredProcedure.read(in);
+ isRead = true;
+ break;
+ }
+ case OperationType.OP_DROP_PLSQL_STORED_PROCEDURE: {
+ data = PlsqlProcedureKey.read(in);
+ isRead = true;
+ break;
+ }
+ case OperationType.OP_ADD_PLSQL_PACKAGE: {
+ data = PlsqlPackage.read(in);
+ isRead = true;
+ break;
+ }
+ case OperationType.OP_DROP_PLSQL_PACKAGE: {
+ data = PlsqlProcedureKey.read(in);
+ isRead = true;
+ break;
+ }
case OperationType.OP_ALTER_DATABASE_PROPERTY: {
data = AlterDatabasePropertyInfo.read(in);
isRead = true;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java b/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java
index 8e7c5f79ffd0ba..71eaf59863de61 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/mysql/MysqlChannel.java
@@ -469,6 +469,18 @@ public void sendOnePacket(ByteBuffer packet) throws IOException {
}
}
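+    // Send the values of one result row as a single packet: each value is stringified and written
+    // as a length-prefixed byte array (writeVInt of the length, then the bytes).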
+ public void sendOnePacket(Object[] rows) throws IOException {
+ ByteBuffer packet;
+ serializer.reset();
+ for (Object value : rows) {
+ byte[] bytes = String.valueOf(value).getBytes();
+ serializer.writeVInt(bytes.length);
+ serializer.writeBytes(bytes);
+ }
+ packet = serializer.toByteBuffer();
+ sendOnePacket(packet);
+ }
+
public void sendAndFlush(ByteBuffer packet) throws IOException {
sendOnePacket(packet);
flush();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
index e427931d5ee057..9cab84711f638f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
@@ -70,6 +70,7 @@
import org.apache.doris.nereids.DorisParser.ConstantContext;
import org.apache.doris.nereids.DorisParser.ConstantSeqContext;
import org.apache.doris.nereids.DorisParser.CreateMTMVContext;
+import org.apache.doris.nereids.DorisParser.CreateProcedureContext;
import org.apache.doris.nereids.DorisParser.CreateRowPolicyContext;
import org.apache.doris.nereids.DorisParser.CreateTableContext;
import org.apache.doris.nereids.DorisParser.CteContext;
@@ -341,6 +342,7 @@
import org.apache.doris.nereids.trees.plans.commands.Constraint;
import org.apache.doris.nereids.trees.plans.commands.CreateMTMVCommand;
import org.apache.doris.nereids.trees.plans.commands.CreatePolicyCommand;
+import org.apache.doris.nereids.trees.plans.commands.CreateProcedureCommand;
import org.apache.doris.nereids.trees.plans.commands.CreateTableCommand;
import org.apache.doris.nereids.trees.plans.commands.DeleteFromCommand;
import org.apache.doris.nereids.trees.plans.commands.DeleteFromUsingCommand;
@@ -372,6 +374,7 @@
import org.apache.doris.nereids.trees.plans.commands.info.DistributionDescriptor;
import org.apache.doris.nereids.trees.plans.commands.info.DropMTMVInfo;
import org.apache.doris.nereids.trees.plans.commands.info.FixedRangePartition;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
import org.apache.doris.nereids.trees.plans.commands.info.InPartition;
import org.apache.doris.nereids.trees.plans.commands.info.IndexDefinition;
import org.apache.doris.nereids.trees.plans.commands.info.LessThanPartition;
@@ -621,7 +624,7 @@ public SimpleColumnDefinition visitSimpleColumnDef(SimpleColumnDefContext ctx) {
* @param ctx context
* @return originSql
*/
- private String getOriginSql(ParserRuleContext ctx) {
+ public String getOriginSql(ParserRuleContext ctx) {
int startIndex = ctx.start.getStartIndex();
int stopIndex = ctx.stop.getStopIndex();
org.antlr.v4.runtime.misc.Interval interval = new org.antlr.v4.runtime.misc.Interval(startIndex, stopIndex);
@@ -2097,7 +2100,7 @@ public Expression visitArraySlice(ArraySliceContext ctx) {
}
@Override
- public UnboundSlot visitColumnReference(ColumnReferenceContext ctx) {
+ public Expression visitColumnReference(ColumnReferenceContext ctx) {
// todo: handle quoted and unquoted
return UnboundSlot.quoted(ctx.getText());
}
@@ -3246,11 +3249,25 @@ public Object visitSample(SampleContext ctx) {
@Override
public Object visitCallProcedure(CallProcedureContext ctx) {
- String functionName = ctx.functionName.getText();
+        List<String> nameParts = visitMultipartIdentifier(ctx.name);
+ FuncNameInfo procedureName = new FuncNameInfo(nameParts);
        List<Expression> arguments = ctx.expression().stream()
.map(this::typedVisit)
.collect(ImmutableList.toImmutableList());
- UnboundFunction unboundFunction = new UnboundFunction(functionName, arguments);
- return new CallCommand(unboundFunction);
+ UnboundFunction unboundFunction = new UnboundFunction(procedureName.getDb(), procedureName.getName(),
+ true, arguments);
+ return new CallCommand(unboundFunction, getOriginSql(ctx));
+ }
+
+ @Override
+ public LogicalPlan visitCreateProcedure(CreateProcedureContext ctx) {
+        List<String> nameParts = visitMultipartIdentifier(ctx.name);
+ FuncNameInfo procedureName = new FuncNameInfo(nameParts);
+ return ParserUtils.withOrigin(ctx, () -> {
+ LogicalPlan createProcedurePlan;
+ createProcedurePlan = new CreateProcedureCommand(procedureName, getOriginSql(ctx),
+ ctx.REPLACE() != null);
+ return createProcedurePlan;
+ });
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java
index 8e01ce2e4b2d05..0ee3f5d068f9ce 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java
@@ -24,11 +24,13 @@
import org.apache.doris.nereids.DorisParser;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.glue.LogicalPlanAdapter;
+import org.apache.doris.nereids.parser.plsql.PLSqlLogicalPlanBuilder;
import org.apache.doris.nereids.trees.expressions.Expression;
import org.apache.doris.nereids.trees.plans.logical.LogicalPlan;
import org.apache.doris.nereids.types.DataType;
import org.apache.doris.plugin.DialectConverterPlugin;
import org.apache.doris.plugin.PluginMgr;
+import org.apache.doris.qe.ConnectContext;
import org.apache.doris.qe.SessionVariable;
import com.google.common.collect.Lists;
@@ -102,6 +104,9 @@ private List parseSQLWithDialect(String sql,
}
}
+ if (ConnectContext.get().isRunProcedure()) {
+ return parseSQL(sql, new PLSqlLogicalPlanBuilder());
+ }
// fallback if any exception occurs before
return parseSQL(sql);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/plsql/PLSqlLogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/plsql/PLSqlLogicalPlanBuilder.java
new file mode 100644
index 00000000000000..5841c451dd1428
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/plsql/PLSqlLogicalPlanBuilder.java
@@ -0,0 +1,53 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.nereids.parser.plsql;
+
+import org.apache.doris.nereids.DorisParser.ColumnReferenceContext;
+import org.apache.doris.nereids.PLParser.MultipartIdentifierContext;
+import org.apache.doris.nereids.analyzer.UnboundSlot;
+import org.apache.doris.nereids.parser.LogicalPlanBuilder;
+import org.apache.doris.nereids.trees.expressions.Expression;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.qe.ConnectContext;
+
+import com.google.common.collect.ImmutableList;
+import org.antlr.v4.runtime.RuleContext;
+
+import java.util.List;
+
+/**
+ * Extends from {@link org.apache.doris.nereids.parser.LogicalPlanBuilder},
+ * just focus on the difference between these query syntax.
+ */
+public class PLSqlLogicalPlanBuilder extends LogicalPlanBuilder {
+
+    public List<String> visitMultipartIdentifier(MultipartIdentifierContext ctx) {
+ return ctx.parts.stream()
+ .map(RuleContext::getText)
+ .collect(ImmutableList.toImmutableList());
+ }
+
+ @Override
+ public Expression visitColumnReference(ColumnReferenceContext ctx) {
+ Var var = ConnectContext.get().getProcedureExec().findVariable(ctx.getText());
+ if (var != null) {
+ return var.toLiteral();
+ }
+ return UnboundSlot.quoted(ctx.getText());
+ }
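+
+    // Illustrative example: while a procedure that declared a variable v_id is executing, a reference
+    // to "v_id" in a query is replaced by the variable's literal value via findVariable()/toLiteral();
+    // any other name still becomes an UnboundSlot, as in the base LogicalPlanBuilder.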
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java
index 530003b791c106..e338bb03ffb0cf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java
@@ -139,5 +139,6 @@ public enum PlanType {
PAUSE_MTMV_COMMAND,
RESUME_MTMV_COMMAND,
CANCEL_MTMV_TASK_COMMAND,
- CALL_COMMAND
+ CALL_COMMAND,
+ CREATE_PROCEDURE_COMMAND
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CallCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CallCommand.java
index 0042e65be6cd64..29e0b17228f26d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CallCommand.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CallCommand.java
@@ -36,18 +36,20 @@ public class CallCommand extends Command implements ForwardWithSync {
public static final Logger LOG = LogManager.getLogger(CallCommand.class);
private final UnboundFunction unboundFunction;
+ private final String originSql;
/**
* constructor
*/
- public CallCommand(UnboundFunction unboundFunction) {
+ public CallCommand(UnboundFunction unboundFunction, String originSql) {
super(PlanType.CALL_COMMAND);
this.unboundFunction = Objects.requireNonNull(unboundFunction, "function is null");
+ this.originSql = originSql;
}
@Override
public void run(ConnectContext ctx, StmtExecutor executor) throws Exception {
- CallFunc analyzedFunc = CallFunc.getFunc(ctx.getCurrentUserIdentity(), unboundFunction);
+ CallFunc analyzedFunc = CallFunc.getFunc(ctx, ctx.getCurrentUserIdentity(), unboundFunction, originSql);
analyzedFunc.run();
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateProcedureCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateProcedureCommand.java
new file mode 100644
index 00000000000000..1a11512d96e9ee
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CreateProcedureCommand.java
@@ -0,0 +1,68 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.nereids.trees.plans.commands;
+
+import org.apache.doris.nereids.annotation.Developing;
+import org.apache.doris.nereids.trees.plans.PlanType;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
+import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor;
+import org.apache.doris.plsql.metastore.PlsqlMetaClient;
+import org.apache.doris.qe.ConnectContext;
+import org.apache.doris.qe.StmtExecutor;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.Objects;
+
+/**
+ * create procedure command for plsql stored procedures
+ */
+@Developing
+public class CreateProcedureCommand extends Command implements ForwardWithSync {
+ public static final Logger LOG = LogManager.getLogger(CreateProcedureCommand.class);
+ private final FuncNameInfo procedureName;
+ private final String source; // Original SQL, from LogicalPlanBuilder.getOriginSql()
+ private final boolean isForce;
+ private final PlsqlMetaClient client;
+
+ /**
+ * constructor
+ */
+ public CreateProcedureCommand(FuncNameInfo procedureName, String source, boolean isForce) {
+ super(PlanType.CREATE_PROCEDURE_COMMAND);
+ this.client = new PlsqlMetaClient();
+ this.procedureName = Objects.requireNonNull(procedureName, "procedureName is null");
+ this.source = Objects.requireNonNull(source, "source is null");
+ this.isForce = isForce;
+ }
+
+ @Override
+ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception {
+ // TODO, removeCached needs to be synchronized to all Observer FEs.
+ // Even if it is always executed on the Master FE, it still has to deal with Master switching.
+ ctx.getPlSqlOperation().getExec().functions.removeCached(procedureName.toString());
+ client.addPlsqlStoredProcedure(procedureName.getName(), procedureName.getCtl(), procedureName.getDb(),
+ ctx.getQualifiedUser(), source, isForce);
+ }
+
+ @Override
+    public <R, C> R accept(PlanVisitor<R, C> visitor, C context) {
+ return visitor.visitCreateProcedureCommand(this, context);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallFunc.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallFunc.java
index a9ba819951446d..4a8cf560c28e77 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallFunc.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallFunc.java
@@ -19,6 +19,7 @@
import org.apache.doris.analysis.UserIdentity;
import org.apache.doris.nereids.analyzer.UnboundFunction;
+import org.apache.doris.qe.ConnectContext;
/**
* call function
@@ -28,13 +29,15 @@ public abstract class CallFunc {
/**
* Get the instance of CallFunc
*/
- public static CallFunc getFunc(UserIdentity user, UnboundFunction unboundFunction) {
+ public static CallFunc getFunc(ConnectContext ctx, UserIdentity user, UnboundFunction unboundFunction,
+ String originSql) {
String funcName = unboundFunction.getName().toUpperCase();
switch (funcName) {
- case "EXECUTE_STMT":
+            // TODO: built-in functions require separate management
+ case "EXECUTE_STMT": // Call built-in functions first
return CallExecuteStmtFunc.create(user, unboundFunction.getArguments());
default:
- throw new IllegalArgumentException("unknown function name: " + funcName);
+ return CallProcedure.create(ctx, originSql);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallProcedure.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallProcedure.java
new file mode 100644
index 00000000000000..ece58982d7fa4a
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/call/CallProcedure.java
@@ -0,0 +1,51 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.nereids.trees.plans.commands.call;
+
+import org.apache.doris.plsql.executor.PlSqlOperation;
+import org.apache.doris.qe.ConnectContext;
+
+import java.util.Objects;
+
+/**
+ * CallProcedure
+ */
+public class CallProcedure extends CallFunc {
+ private final PlSqlOperation executor;
+ private final ConnectContext ctx;
+ private final String source;
+
+ private CallProcedure(PlSqlOperation executor, ConnectContext ctx, String source) {
+ this.executor = Objects.requireNonNull(executor, "executor is missing");
+ this.ctx = ctx;
+ this.source = source;
+ }
+
+ /**
+ * Create a CallFunc
+ */
+ public static CallFunc create(ConnectContext ctx, String source) {
+ PlSqlOperation plSqlOperation = ctx.getPlSqlOperation();
+ return new CallProcedure(plSqlOperation, ctx, source);
+ }
+
+ @Override
+ public void run() {
+ executor.execute(ctx, source);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/FuncNameInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/FuncNameInfo.java
new file mode 100644
index 00000000000000..7268a5804beff3
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/FuncNameInfo.java
@@ -0,0 +1,143 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.nereids.trees.plans.commands.info;
+
+import org.apache.doris.datasource.InternalCatalog;
+import org.apache.doris.nereids.exceptions.AnalysisException;
+import org.apache.doris.nereids.util.Utils;
+import org.apache.doris.qe.ConnectContext;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * procedure, function, package name info
+ */
+public class FuncNameInfo {
+    private final List<String> nameParts;
+ private String ctl;
+ private String db;
+ private final String name;
+
+ /**
+ * FuncNameInfo
+ *
+ * @param parts like [ctl1,db1,name1] or [db1,name1] or [name1]
+ */
+    public FuncNameInfo(List<String> parts) {
+ nameParts = parts;
+ Objects.requireNonNull(parts, "require parts object");
+ int size = parts.size();
+ Preconditions.checkArgument(size > 0, "procedure/function/package name can't be empty");
+ name = parts.get(size - 1).toUpperCase();
+ if (size >= 2) {
+ db = parts.get(size - 2);
+ }
+ if (size >= 3) {
+ ctl = parts.get(size - 3);
+ }
+ }
+
+ /**
+ * FuncNameInfo
+ *
+ * @param ctl catalogName
+ * @param db dbName
+ * @param name funcName
+ */
+ public FuncNameInfo(String ctl, String db, String name) {
+        Objects.requireNonNull(ctl, "require ctl object");
+ Objects.requireNonNull(db, "require db object");
+ Objects.requireNonNull(name, "require name object");
+ this.ctl = ctl;
+ this.db = db;
+ this.name = name.toUpperCase();
+ this.nameParts = Lists.newArrayList(ctl, db, name);
+ }
+
+ /**
+ * FuncNameInfo
+ *
+ * @param name funcName
+ */
+ public FuncNameInfo(String name) {
+ Objects.requireNonNull(name, "require name object");
+ this.name = name.toUpperCase();
+ this.nameParts = Lists.newArrayList(name);
+ }
+
+ /**
+ * analyze procedureNameInfo
+ *
+ * @param ctx ctx
+ */
+ public void analyze(ConnectContext ctx) {
+ if (Strings.isNullOrEmpty(ctl)) {
+ ctl = ctx.getDefaultCatalog();
+ if (Strings.isNullOrEmpty(ctl)) {
+ ctl = InternalCatalog.INTERNAL_CATALOG_NAME;
+ }
+ }
+ if (Strings.isNullOrEmpty(db)) {
+ db = ctx.getDatabase();
+ if (Strings.isNullOrEmpty(db)) {
+                throw new AnalysisException("no database selected for procedure/function/package name");
+ }
+ }
+
+ if (Strings.isNullOrEmpty(name)) {
+ throw new AnalysisException("procedure/function/package name is null");
+ }
+ }
+
+ /**
+ * get catalog name
+ *
+ * @return ctlName
+ */
+ public String getCtl() {
+ return ctl == null ? "" : ctl;
+ }
+
+ /**
+ * get db name
+ *
+ * @return dbName
+ */
+ public String getDb() {
+ return db == null ? "" : db;
+ }
+
+ /**
+     * get procedure/function/package name
+     *
+     * @return name
+ */
+ public String getName() {
+ return name == null ? "" : name;
+ }
+
+ public String toString() {
+ return nameParts.stream().map(Utils::quoteIfNeeded)
+ .reduce((left, right) -> left + "." + right).orElse("");
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java
index db14074c06c46f..107cfd0a1a28d5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java
@@ -25,6 +25,7 @@
import org.apache.doris.nereids.trees.plans.commands.Command;
import org.apache.doris.nereids.trees.plans.commands.CreateMTMVCommand;
import org.apache.doris.nereids.trees.plans.commands.CreatePolicyCommand;
+import org.apache.doris.nereids.trees.plans.commands.CreateProcedureCommand;
import org.apache.doris.nereids.trees.plans.commands.CreateTableCommand;
import org.apache.doris.nereids.trees.plans.commands.DeleteFromCommand;
import org.apache.doris.nereids.trees.plans.commands.DeleteFromUsingCommand;
@@ -136,4 +137,8 @@ default R visitCancelMTMVTaskCommand(CancelMTMVTaskCommand cancelMTMVTaskCommand
default R visitCallCommand(CallCommand callCommand, C context) {
return visitCommand(callCommand, context);
}
+
+ default R visitCreateProcedureCommand(CreateProcedureCommand createProcedureCommand, C context) {
+ return visitCommand(createProcedureCommand, context);
+ }
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java b/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java
index 958277dd6b7492..28b8aa31c0187c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/EditLog.java
@@ -75,6 +75,9 @@
import org.apache.doris.meta.MetaContext;
import org.apache.doris.metric.MetricRepo;
import org.apache.doris.mysql.privilege.UserPropertyInfo;
+import org.apache.doris.plsql.metastore.PlsqlPackage;
+import org.apache.doris.plsql.metastore.PlsqlProcedureKey;
+import org.apache.doris.plsql.metastore.PlsqlStoredProcedure;
import org.apache.doris.plugin.PluginInfo;
import org.apache.doris.policy.DropPolicyLog;
import org.apache.doris.policy.Policy;
@@ -1100,6 +1103,22 @@ public static void loadJournal(Env env, Long logId, JournalEntity journal) {
env.getAnalysisManager().replayDeleteAnalysisTask((AnalyzeDeletionLog) journal.getData());
break;
}
+ case OperationType.OP_ADD_PLSQL_STORED_PROCEDURE: {
+ env.getPlsqlManager().replayAddPlsqlStoredProcedure((PlsqlStoredProcedure) journal.getData());
+ break;
+ }
+ case OperationType.OP_DROP_PLSQL_STORED_PROCEDURE: {
+ env.getPlsqlManager().replayDropPlsqlStoredProcedure((PlsqlProcedureKey) journal.getData());
+ break;
+ }
+ case OperationType.OP_ADD_PLSQL_PACKAGE: {
+ env.getPlsqlManager().replayAddPlsqlPackage((PlsqlPackage) journal.getData());
+ break;
+ }
+ case OperationType.OP_DROP_PLSQL_PACKAGE: {
+ env.getPlsqlManager().replayDropPlsqlPackage((PlsqlProcedureKey) journal.getData());
+ break;
+ }
case OperationType.OP_ALTER_DATABASE_PROPERTY: {
AlterDatabasePropertyInfo alterDatabasePropertyInfo = (AlterDatabasePropertyInfo) journal.getData();
LOG.info("replay alter database property: {}", alterDatabasePropertyInfo);
@@ -1716,6 +1735,22 @@ public void dropWorkloadSchedPolicy(long policyId) {
logEdit(OperationType.OP_DROP_WORKLOAD_SCHED_POLICY, new DropWorkloadSchedPolicyOperatorLog(policyId));
}
+ public void logAddPlsqlStoredProcedure(PlsqlStoredProcedure plsqlStoredProcedure) {
+ logEdit(OperationType.OP_ADD_PLSQL_STORED_PROCEDURE, plsqlStoredProcedure);
+ }
+
+ public void logDropPlsqlStoredProcedure(PlsqlProcedureKey plsqlProcedureKey) {
+ logEdit(OperationType.OP_DROP_PLSQL_STORED_PROCEDURE, plsqlProcedureKey);
+ }
+
+ public void logAddPlsqlPackage(PlsqlPackage pkg) {
+ logEdit(OperationType.OP_ADD_PLSQL_PACKAGE, pkg);
+ }
+
+ public void logDropPlsqlPackage(PlsqlProcedureKey plsqlProcedureKey) {
+ logEdit(OperationType.OP_DROP_PLSQL_PACKAGE, plsqlProcedureKey);
+ }
+
public void logAlterStoragePolicy(StoragePolicy storagePolicy) {
logEdit(OperationType.OP_ALTER_STORAGE_POLICY, storagePolicy);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java b/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java
index 0868d7f371bd3e..c88dd02950298e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/OperationType.java
@@ -317,7 +317,7 @@ public class OperationType {
public static final short OP_ALTER_WORKLOAD_SCHED_POLICY = 414;
public static final short OP_DROP_WORKLOAD_SCHED_POLICY = 415;
- // query stats 440 ~ 424
+ // query stats 420 ~ 424
public static final short OP_CLEAN_QUERY_STATS = 420;
// update binlog config
@@ -373,6 +373,15 @@ public class OperationType {
public static final short OP_ADD_META_ID_MAPPINGS = 470;
+ // plsql 471 ~ 479
+ public static final short OP_ADD_PLSQL_STORED_PROCEDURE = 471;
+
+ public static final short OP_DROP_PLSQL_STORED_PROCEDURE = 472;
+
+ public static final short OP_ADD_PLSQL_PACKAGE = 473;
+
+ public static final short OP_DROP_PLSQL_PACKAGE = 474;
+
/**
* Get opcode name by op code.
**/
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java
index 2d777a50b624ae..9f4d289d824060 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/MetaPersistMethod.java
@@ -246,6 +246,13 @@ public static MetaPersistMethod create(String name) throws NoSuchMethodException
metaPersistMethod.writeMethod =
Env.class.getDeclaredMethod("saveInsertOverwrite", CountingDataOutputStream.class, long.class);
break;
+ case "plsql":
+ // package and stored procedure use the same method in PlsqlManager.
+ metaPersistMethod.readMethod = Env.class.getDeclaredMethod("loadPlsqlProcedure", DataInputStream.class,
+ long.class);
+ metaPersistMethod.writeMethod = Env.class.getDeclaredMethod("savePlsqlProcedure",
+ CountingDataOutputStream.class, long.class);
+ break;
default:
break;
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java
index 32bf866f2288a1..caa35b16c3ff29 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/persist/meta/PersistMetaModules.java
@@ -40,7 +40,7 @@ public class PersistMetaModules {
"paloAuth", "transactionState", "colocateTableIndex", "routineLoadJobs", "loadJobV2", "smallFiles",
"plugins", "deleteHandler", "sqlBlockRule", "policy", "globalFunction", "workloadGroups",
"binlogs", "resourceGroups", "AnalysisMgrV2", "AsyncJobManager", "workloadSchedPolicy",
- "insertOverwrite");
+ "insertOverwrite", "plsql");
// Modules in this list is deprecated and will not be saved in meta file. (also should not be in MODULE_NAMES)
public static final ImmutableList<String> DEPRECATED_MODULE_NAMES = ImmutableList.of(
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Arguments.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Arguments.java
new file mode 100644
index 00000000000000..58272e755b2944
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Arguments.java
@@ -0,0 +1,215 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Arguments.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+public class Arguments {
+ private CommandLine commandLine;
+ private Options options = new Options();
+
+ String execString;
+ String fileName;
+ String main;
+ Map<String, String> vars = new HashMap<>();
+
+ public static Arguments script(String str) {
+ Arguments arguments = new Arguments();
+ arguments.parse(new String[] {"-e", str});
+ return arguments;
+ }
+
+ @SuppressWarnings("static-access")
+ public Arguments() {
+ // -e 'query'
+ options.addOption(OptionBuilder
+ .hasArg()
+ .withArgName("quoted-query-string")
+ .withDescription("PL/SQL from command line")
+ .create('e'));
+
+ // -f
+ options.addOption(OptionBuilder
+ .hasArg()
+ .withArgName("filename")
+ .withDescription("PL/SQL from a file")
+ .create('f'));
+
+ // -main entry_point_name
+ options.addOption(OptionBuilder
+ .hasArg()
+ .withArgName("procname")
+ .withDescription("Entry point (procedure or function name)")
+ .create("main"));
+
+ // -hiveconf x=y
+ options.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("property=value")
+ .withLongOpt("hiveconf")
+ .withDescription("Value for given property")
+ .create());
+
+ // Substitution option -d, --define
+ options.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("key=value")
+ .withLongOpt("define")
+ .withDescription("Variable substitution e.g. -d A=B or --define A=B")
+ .create('d'));
+
+ // Substitution option --hivevar
+ options.addOption(OptionBuilder
+ .withValueSeparator()
+ .hasArgs(2)
+ .withArgName("key=value")
+ .withLongOpt("hivevar")
+ .withDescription("Variable substitution e.g. --hivevar A=B")
+ .create());
+
+ // [-version|--version]
+ options.addOption(new Option("version", "version", false, "Print PL/SQL version"));
+
+ // [-trace|--trace]
+ options.addOption(new Option("trace", "trace", false, "Print debug information"));
+
+ // [-offline|--offline]
+ options.addOption(new Option("offline", "offline", false, "Offline mode - skip SQL execution"));
+
+ // [-H|--help]
+ options.addOption(new Option("H", "help", false, "Print help information"));
+ }
+
+ /**
+ * Parse the command line arguments
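+ * e.g. parse(new String[] {"-e", "SELECT 1", "--hivevar", "A=B"}) fills execString and the substitution variables (illustrative arguments).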
+ */
+ public boolean parse(String[] args) {
+ try {
+ commandLine = new GnuParser().parse(options, args);
+ execString = commandLine.getOptionValue('e');
+ fileName = commandLine.getOptionValue('f');
+ main = commandLine.getOptionValue("main");
+ Properties p = commandLine.getOptionProperties("hiveconf");
+ for (String key : p.stringPropertyNames()) {
+ vars.put(key, p.getProperty(key));
+ }
+ p = commandLine.getOptionProperties("hivevar");
+ for (String key : p.stringPropertyNames()) {
+ vars.put(key, p.getProperty(key));
+ }
+ p = commandLine.getOptionProperties("define");
+ for (String key : p.stringPropertyNames()) {
+ vars.put(key, p.getProperty(key));
+ }
+ } catch (ParseException e) {
+ System.err.println(e.getMessage());
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Get the value of execution option -e
+ */
+ public String getExecString() {
+ return execString;
+ }
+
+ /**
+ * Get the value of file option -f
+ */
+ public String getFileName() {
+ return fileName;
+ }
+
+ /**
+ * Get the value of -main option
+ */
+ public String getMain() {
+ return main;
+ }
+
+ /**
+ * Get the variables
+ */
+ public Map getVars() {
+ return vars;
+ }
+
+ /**
+ * Test whether version option is set
+ */
+ public boolean hasVersionOption() {
+ if (commandLine.hasOption("version")) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Test whether debug option is set
+ */
+ public boolean hasTraceOption() {
+ if (commandLine.hasOption("trace")) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Test whether offline option is set
+ */
+ public boolean hasOfflineOption() {
+ if (commandLine.hasOption("offline")) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Test whether help option is set
+ */
+ public boolean hasHelpOption() {
+ if (commandLine.hasOption('H')) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Print help information
+ */
+ public void printHelp() {
+ new HelpFormatter().printHelp("plsql", options);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Column.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Column.java
new file mode 100644
index 00000000000000..6e49e455989e08
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Column.java
@@ -0,0 +1,70 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+/**
+ * Table column
+ */
+public class Column {
+ private org.apache.doris.plsql.ColumnDefinition definition;
+ private Var value;
+
+ public Column(String name, String type, Var value) {
+ this.definition = new org.apache.doris.plsql.ColumnDefinition(name,
+ org.apache.doris.plsql.ColumnType.parse(type));
+ this.value = value;
+ }
+
+ /**
+ * Set the column value
+ */
+ public void setValue(Var value) {
+ this.value = value;
+ }
+
+ /**
+ * Get the column name
+ */
+ public String getName() {
+ return definition.columnName();
+ }
+
+ /**
+ * Get the column type
+ */
+ public String getType() {
+ return definition.columnType().typeString();
+ }
+
+ public org.apache.doris.plsql.ColumnDefinition definition() {
+ return definition;
+ }
+
+ /**
+ * Get the column value
+ */
+ Var getValue() {
+ return value;
+ }
+}
+
+
+
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnDefinition.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnDefinition.java
new file mode 100644
index 00000000000000..f9d2ed6aae068c
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnDefinition.java
@@ -0,0 +1,47 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ColumnDefinition.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+public class ColumnDefinition {
+ private final String name;
+ private final ColumnType type;
+
+ public static ColumnDefinition unnamed(ColumnType type) {
+ return new ColumnDefinition("__UNNAMED__", type);
+ }
+
+ public ColumnDefinition(String name, ColumnType type) {
+ this.name = name;
+ this.type = type;
+ }
+
+ public String columnName() {
+ return name;
+ }
+
+ public ColumnType columnType() {
+ return type;
+ }
+
+ public String columnTypeString() {
+ return type.typeString();
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnMap.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnMap.java
new file mode 100644
index 00000000000000..09e371ed7228a8
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnMap.java
@@ -0,0 +1,53 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ColumnMap.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class ColumnMap {
+ private List<Column> columns = new ArrayList<>();
+ private Map<String, Column> columnMap = new HashMap<>();
+
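+ // Column names are stored and looked up upper-cased, so get("id") and get("ID") return the same column.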
+ public void add(Column column) {
+ columns.add(column);
+ columnMap.put(column.getName().toUpperCase(), column);
+ }
+
+ public Column get(String name) {
+ return columnMap.get(name.toUpperCase());
+ }
+
+ public Column at(int index) {
+ return columns.get(index);
+ }
+
+ public List<Column> columns() {
+ return Collections.unmodifiableList(columns);
+ }
+
+ public int size() {
+ return columns.size();
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnType.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnType.java
new file mode 100644
index 00000000000000..abed9ad1baba8b
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/ColumnType.java
@@ -0,0 +1,76 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ColumnType.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+public class ColumnType {
+ private final String type;
+ private final Precision precision;
+
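+ // e.g. parse("DECIMAL(10,2)") yields type "DECIMAL" with len 10 and scale 2; parse("INT") yields len 0 and scale 0.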
+ public static ColumnType parse(String type) {
+ return new ColumnType(parseType(type), Precision.parse(type));
+ }
+
+ public ColumnType(String type, Precision precision) {
+ this.type = type;
+ this.precision = precision;
+ }
+
+ private static String parseType(String type) {
+ int index = type.indexOf('(');
+ return index == -1 ? type : type.substring(0, index);
+ }
+
+ public String typeString() {
+ return type;
+ }
+
+ public Precision precision() {
+ return precision;
+ }
+
+ private static class Precision {
+ public final int len;
+ public final int scale;
+
+ public static Precision parse(String type) {
+ int open = type.indexOf('(');
+ if (open == -1) {
+ return new Precision(0, 0);
+ }
+ int len;
+ int scale = 0;
+ int comma = type.indexOf(',', open);
+ int close = type.indexOf(')', open);
+ if (comma == -1) {
+ len = Integer.parseInt(type.substring(open + 1, close));
+ } else {
+ len = Integer.parseInt(type.substring(open + 1, comma));
+ scale = Integer.parseInt(type.substring(comma + 1, close));
+ }
+ return new Precision(scale, len);
+ }
+
+ Precision(int scale, int len) {
+ this.len = len;
+ this.scale = scale;
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Conf.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Conf.java
new file mode 100644
index 00000000000000..c63b1c060560bb
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Conf.java
@@ -0,0 +1,159 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Conf.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.plsql.Exec.OnError;
+
+import org.apache.hadoop.conf.Configuration;
+
+import java.util.HashMap;
+
+/**
+ * PL/SQL run-time configuration
+ */
+public class Conf extends Configuration {
+
+ public static final String DOT_PLSQLRC = ".plsqlrc";
+ public static final String PLSQLRC = "plsqlrc";
+ public static final String PLSQL_LOCALS_SQL = "plsql_locals.sql";
+
+ public static final String CONN_CONVERT = "plsql.conn.convert.";
+ public static final String CONN_DEFAULT = "plsql.conn.default";
+ public static final String DUAL_TABLE = "plsql.dual.table";
+ public static final String INSERT_VALUES = "plsql.insert.values";
+ public static final String ONERROR = "plsql.onerror";
+ public static final String TEMP_TABLES = "plsql.temp.tables";
+ public static final String TEMP_TABLES_SCHEMA = "plsql.temp.tables.schema";
+ public static final String TEMP_TABLES_LOCATION = "plsql.temp.tables.location";
+
+ public static final String TRUE = "true";
+ public static final String FALSE = "false";
+ public static final String YES = "yes";
+ public static final String NO = "no";
+
+ public enum InsertValues {
+ NATIVE, SELECT
+ }
+
+ public enum TempTables {
+ NATIVE, MANAGED
+ }
+
+ public String defaultConnection;
+
+ OnError onError = OnError.EXCEPTION;
+ InsertValues insertValues = InsertValues.NATIVE;
+ TempTables tempTables = TempTables.NATIVE;
+
+ String dualTable = null;
+
+ String tempTablesSchema = "";
+ String tempTablesLocation = "/tmp/plsql";
+
+ HashMap<String, Boolean> connConvert = new HashMap<>();
+
+ /**
+ * Set an option
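+ * e.g. setOption("plsql.onerror", "stop") or setOption("plsql.conn.convert.mysqlconn", "true") (illustrative keys and values)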
+ */
+ public void setOption(String key, String value) {
+ if (key.startsWith(CONN_CONVERT)) {
+ setConnectionConvert(key.substring(19), value);
+ } else if (key.compareToIgnoreCase(CONN_DEFAULT) == 0) {
+ defaultConnection = value;
+ } else if (key.compareToIgnoreCase(DUAL_TABLE) == 0) {
+ dualTable = value;
+ } else if (key.compareToIgnoreCase(INSERT_VALUES) == 0) {
+ setInsertValues(value);
+ } else if (key.compareToIgnoreCase(ONERROR) == 0) {
+ setOnError(value);
+ } else if (key.compareToIgnoreCase(TEMP_TABLES) == 0) {
+ setTempTables(value);
+ } else if (key.compareToIgnoreCase(TEMP_TABLES_SCHEMA) == 0) {
+ tempTablesSchema = value;
+ } else if (key.compareToIgnoreCase(TEMP_TABLES_LOCATION) == 0) {
+ tempTablesLocation = value;
+ }
+ }
+
+ /**
+ * Set plsql.insert.values option
+ */
+ private void setInsertValues(String value) {
+ if (value.compareToIgnoreCase("NATIVE") == 0) {
+ insertValues = InsertValues.NATIVE;
+ } else if (value.compareToIgnoreCase("SELECT") == 0) {
+ insertValues = InsertValues.SELECT;
+ }
+ }
+
+ /**
+ * Set plsql.temp.tables option
+ */
+ private void setTempTables(String value) {
+ if (value.compareToIgnoreCase("NATIVE") == 0) {
+ tempTables = TempTables.NATIVE;
+ } else if (value.compareToIgnoreCase("MANAGED") == 0) {
+ tempTables = TempTables.MANAGED;
+ }
+ }
+
+ /**
+ * Set error handling approach
+ */
+ private void setOnError(String value) {
+ if (value.compareToIgnoreCase("EXCEPTION") == 0) {
+ onError = OnError.EXCEPTION;
+ } else if (value.compareToIgnoreCase("SETERROR") == 0) {
+ onError = OnError.SETERROR;
+ } else if (value.compareToIgnoreCase("STOP") == 0) {
+ onError = OnError.STOP;
+ }
+ }
+
+ /**
+ * Set whether to convert SQL for the specified connection profile
+ */
+ void setConnectionConvert(String name, String value) {
+ boolean convert = false;
+ if (value.compareToIgnoreCase(TRUE) == 0 || value.compareToIgnoreCase(YES) == 0) {
+ convert = true;
+ }
+ connConvert.put(name, convert);
+ }
+
+ /**
+ * Get whether to convert SQL for the specified connection profile
+ */
+ boolean getConnectionConvert(String name) {
+ Boolean convert = connConvert.get(name);
+ if (convert != null) {
+ return convert.booleanValue();
+ }
+ return false;
+ }
+
+ /**
+ * Load parameters
+ */
+ public void init() {
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Conn.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Conn.java
new file mode 100644
index 00000000000000..899d251c47ffed
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Conn.java
@@ -0,0 +1,253 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Stack;
+
+public class Conn {
+
+ public enum Type {
+ DB2, HIVE, MYSQL, TERADATA
+ }
+
+ HashMap<String, Stack<Connection>> connections = new HashMap<>();
+ HashMap<String, String> connStrings = new HashMap<>();
+ HashMap<String, Conn.Type> connTypes = new HashMap<>();
+
+ HashMap<String, ArrayList<String>> connInits = new HashMap<>();
+ HashMap<String, ArrayList<String>> preSql = new HashMap<>();
+
+ Exec exec;
+ Timer timer = new Timer();
+ boolean trace = false;
+ boolean info = false;
+
+ Conn(Exec e) {
+ exec = e;
+ trace = exec.getTrace();
+ info = exec.getInfo();
+ }
+
+ /**
+ * Execute a SQL query
+ */
+ public Query executeQuery(Query query, String connName) {
+ try {
+ Connection conn = getConnection(connName);
+ runPreSql(connName, conn);
+ Statement stmt = conn.createStatement();
+ exec.info(null, "Starting query");
+ timer.start();
+ ResultSet rs = stmt.executeQuery(query.sql);
+ timer.stop();
+ query.set(conn, stmt, rs);
+ if (info) {
+ exec.info(null, "Query executed successfully (" + timer.format() + ")");
+ }
+ } catch (Exception e) {
+ query.setError(e);
+ }
+ return query;
+ }
+
+ /**
+ * Prepare a SQL query
+ */
+ public Query prepareQuery(Query query, String connName) {
+ try {
+ Connection conn = getConnection(connName);
+ timer.start();
+ PreparedStatement stmt = conn.prepareStatement(query.sql);
+ timer.stop();
+ query.set(conn, stmt);
+ if (info) {
+ exec.info(null, "Prepared statement executed successfully (" + timer.format() + ")");
+ }
+ } catch (Exception e) {
+ query.setError(e);
+ }
+ return query;
+ }
+
+ /**
+ * Close the query object
+ */
+ public void closeQuery(Query query, String connName) {
+ query.closeStatement();
+ returnConnection(connName, query.getConnection());
+ }
+
+ /**
+ * Run pre-SQL statements
+ *
+ * @throws SQLException
+ */
+ void runPreSql(String connName, Connection conn) throws SQLException {
+ ArrayList<String> sqls = preSql.get(connName);
+ if (sqls != null) {
+ Statement s = conn.createStatement();
+ for (String sql : sqls) {
+ exec.info(null, "Starting pre-SQL statement");
+ s.execute(sql);
+ }
+ s.close();
+ preSql.remove(connName);
+ }
+ }
+
+ /**
+ * Get a connection
+ *
+ * @throws Exception
+ */
+ synchronized Connection getConnection(String connName) throws Exception {
+ Stack<Connection> connStack = connections.get(connName);
+ String connStr = connStrings.get(connName);
+ if (connStr == null) {
+ throw new Exception("Unknown connection profile: " + connName);
+ }
+ if (connStack != null && !connStack.empty()) { // Reuse an existing connection
+ return connStack.pop();
+ }
+ Connection c = openConnection(connStr);
+ ArrayList<String> sqls = connInits.get(connName); // Run initialization statements on the connection
+ if (sqls != null) {
+ Statement s = c.createStatement();
+ for (String sql : sqls) {
+ s.execute(sql);
+ }
+ s.close();
+ }
+ return c;
+ }
+
+ /**
+ * Open a new connection
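+ * The connection string is semicolon-separated: driver;url[;user[;password]][;key=value...],
+ * e.g. "com.mysql.jdbc.Driver;jdbc:mysql://127.0.0.1:9030/db;user;pass" (illustrative values).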
+ *
+ * @throws Exception
+ */
+ Connection openConnection(String connStr) throws Exception {
+ String driver = "com.mysql.jdbc.Driver";
+ StringBuilder url = new StringBuilder();
+ String usr = "";
+ String pwd = "";
+ if (connStr != null) {
+ String[] c = connStr.split(";");
+ if (c.length >= 1) {
+ driver = c[0];
+ }
+ if (c.length >= 2) {
+ url.append(c[1]);
+ } else {
+ url.append("jdbc:mysql://");
+ }
+ for (int i = 2; i < c.length; i++) {
+ if (c[i].contains("=")) {
+ url.append(";");
+ url.append(c[i]);
+ } else if (usr.isEmpty()) {
+ usr = c[i];
+ } else if (pwd.isEmpty()) {
+ pwd = c[i];
+ }
+ }
+ }
+ Class.forName(driver);
+ timer.start();
+ Connection conn = DriverManager.getConnection(url.toString().trim(), usr, pwd);
+ timer.stop();
+ if (info) {
+ exec.info(null, "Open connection: " + url + " (" + timer.format() + ")");
+ }
+ return conn;
+ }
+
+ /**
+ * Get the database type by profile name
+ */
+ Conn.Type getTypeByProfile(String name) {
+ return connTypes.get(name);
+ }
+
+ /**
+ * Get the database type by connection string
+ */
+ Conn.Type getType(String connStr) {
+ if (connStr.contains("hive.")) {
+ return Type.HIVE;
+ } else if (connStr.contains("db2.")) {
+ return Type.DB2;
+ } else if (connStr.contains("mysql.")) {
+ return Type.MYSQL;
+ } else if (connStr.contains("teradata.")) {
+ return Type.TERADATA;
+ }
+ return Type.HIVE;
+ }
+
+ /**
+ * Return the connection to the pool
+ */
+ void returnConnection(String name, Connection conn) {
+ if (conn != null) {
+ connections.get(name).push(conn);
+ }
+ }
+
+ /**
+ * Add a new connection string
+ */
+ public void addConnection(String name, String connStr) {
+ connections.put(name, new Stack<>());
+ connStrings.put(name, connStr);
+ connTypes.put(name, getType(connStr));
+ }
+
+ /**
+ * Add initialization statements for the specified connection
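+ * e.g. addConnectionInit("mysqlconn", "SET NAMES utf8; USE demo") registers two statements (illustrative values).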
+ */
+ public void addConnectionInit(String name, String connInit) {
+ ArrayList<String> a = new ArrayList<>();
+ String[] sa = connInit.split(";");
+ for (String s : sa) {
+ s = s.trim();
+ if (!s.isEmpty()) {
+ a.add(s);
+ }
+ }
+ connInits.put(name, a);
+ }
+
+ /**
+ * Add SQL statements to be executed before executing the next SQL statement (pre-SQL)
+ */
+ public void addPreSql(String name, ArrayList<String> sql) {
+ preSql.put(name, sql);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Console.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Console.java
new file mode 100644
index 00000000000000..024ea356759483
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Console.java
@@ -0,0 +1,51 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Console.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+public interface Console {
+ void print(String msg);
+
+ void printLine(String msg);
+
+ void printError(String msg);
+
+ void flushConsole();
+
+ Console STANDARD = new Console() {
+ @Override
+ public void print(String msg) {
+ System.out.print(msg);
+ }
+
+ @Override
+ public void printLine(String msg) {
+ System.out.println(msg);
+ }
+
+ @Override
+ public void printError(String msg) {
+ System.err.println(msg);
+ }
+
+ @Override
+ public void flushConsole() {}
+ };
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Converter.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Converter.java
new file mode 100644
index 00000000000000..01e9e77e1fae99
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Converter.java
@@ -0,0 +1,83 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Converter.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.nereids.PLParser.DtypeContext;
+import org.apache.doris.nereids.PLParser.Dtype_lenContext;
+
+/**
+ * On-the-fly SQL Converter
+ */
+public class Converter {
+
+ Exec exec;
+ boolean trace = false;
+
+ Converter(Exec e) {
+ exec = e;
+ trace = exec.getTrace();
+ }
+
+ /**
+ * Convert a data type
+ */
+ String dataType(DtypeContext type,
+ Dtype_lenContext len) {
+ String t = exec.getText(type);
+ boolean enclosed = false;
+ if (t.charAt(0) == '[') {
+ t = t.substring(1, t.length() - 1);
+ enclosed = true;
+ }
+ if (t.equalsIgnoreCase("BIT")) {
+ t = "TINYINT";
+ } else if (t.equalsIgnoreCase("INT") || t.equalsIgnoreCase("INTEGER")) {
+ // MySQL can use INT(n)
+ } else if (t.equalsIgnoreCase("INT2")) {
+ t = "SMALLINT";
+ } else if (t.equalsIgnoreCase("INT4")) {
+ t = "INT";
+ } else if (t.equalsIgnoreCase("INT8")) {
+ t = "BIGINT";
+ } else if (t.equalsIgnoreCase("DATETIME") || t.equalsIgnoreCase("SMALLDATETIME")) {
+ t = "TIMESTAMP";
+ } else if ((t.equalsIgnoreCase("VARCHAR") || t.equalsIgnoreCase("NVARCHAR")) && len.MAX() != null) {
+ t = "STRING";
+ } else if (t.equalsIgnoreCase("VARCHAR2") || t.equalsIgnoreCase("NCHAR") || t.equalsIgnoreCase("NVARCHAR")
+ || t.equalsIgnoreCase("TEXT")) {
+ t = "STRING";
+ } else if (t.equalsIgnoreCase("NUMBER") || t.equalsIgnoreCase("NUMERIC")) {
+ t = "DECIMAL";
+ if (len != null) {
+ t += exec.getText(len);
+ }
+ } else if (len != null) {
+ if (!enclosed) {
+ return exec.getText(type, type.getStart(), len.getStop());
+ } else {
+ return t + exec.getText(len, len.getStart(), len.getStop());
+ }
+ } else if (!enclosed) {
+ return exec.getText(type, type.getStart(), type.getStop());
+ }
+ return t;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Cursor.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Cursor.java
new file mode 100644
index 00000000000000..524622dfb57e42
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Cursor.java
@@ -0,0 +1,128 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Cursor.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.nereids.PLParser.Open_stmtContext;
+import org.apache.doris.plsql.executor.QueryExecutor;
+import org.apache.doris.plsql.executor.QueryResult;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class Cursor {
+ private String sql;
+ private ParserRuleContext sqlExpr;
+ private ParserRuleContext sqlSelect;
+ private boolean withReturn = false;
+ private QueryResult queryResult;
+
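+ // Lifecycle: CLOSE -> OPEN (open) -> FETCHED_OK / FETCHED_NODATA (setFetch) -> CLOSE (close).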
+ public enum State {
+ OPEN, FETCHED_OK, FETCHED_NODATA, CLOSE
+ }
+
+ State state = State.CLOSE;
+
+ public Cursor(String sql) {
+ this.sql = sql;
+ }
+
+ public void setExprCtx(ParserRuleContext sqlExpr) {
+ this.sqlExpr = sqlExpr;
+ }
+
+ public void setSelectCtx(ParserRuleContext sqlSelect) {
+ this.sqlSelect = sqlSelect;
+ }
+
+ public void setWithReturn(boolean withReturn) {
+ this.withReturn = withReturn;
+ }
+
+ public ParserRuleContext getSqlExpr() {
+ return sqlExpr;
+ }
+
+ public ParserRuleContext getSqlSelect() {
+ return sqlSelect;
+ }
+
+ public boolean isWithReturn() {
+ return withReturn;
+ }
+
+ public void setSql(String sql) {
+ this.sql = sql;
+ }
+
+ public String getSql() {
+ return sql;
+ }
+
+ public void open(QueryExecutor queryExecutor, Open_stmtContext ctx) {
+ this.queryResult = queryExecutor.executeQuery(sql, ctx);
+ this.state = State.OPEN;
+ }
+
+ public QueryResult getQueryResult() {
+ return queryResult;
+ }
+
+ /**
+ * Set the fetch status
+ */
+ public void setFetch(boolean ok) {
+ if (ok) {
+ state = State.FETCHED_OK;
+ } else {
+ state = State.FETCHED_NODATA;
+ }
+ }
+
+ public Boolean isFound() {
+ if (state == State.OPEN || state == State.CLOSE) {
+ return null;
+ }
+ if (state == State.FETCHED_OK) {
+ return Boolean.TRUE;
+ }
+ return Boolean.FALSE;
+ }
+
+ public Boolean isNotFound() {
+ if (state == State.OPEN || state == State.CLOSE) {
+ return null;
+ }
+ if (state == State.FETCHED_NODATA) {
+ return Boolean.TRUE;
+ }
+ return Boolean.FALSE;
+ }
+
+ public void close() {
+ if (queryResult != null) {
+ queryResult.close();
+ state = State.CLOSE;
+ }
+ }
+
+ public boolean isOpen() {
+ return state != State.CLOSE;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Exec.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Exec.java
new file mode 100644
index 00000000000000..9a042623da3d7d
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Exec.java
@@ -0,0 +1,2401 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.nereids.PLLexer;
+import org.apache.doris.nereids.PLParser;
+import org.apache.doris.nereids.PLParser.Allocate_cursor_stmtContext;
+import org.apache.doris.nereids.PLParser.Assignment_stmt_collection_itemContext;
+import org.apache.doris.nereids.PLParser.Assignment_stmt_multiple_itemContext;
+import org.apache.doris.nereids.PLParser.Assignment_stmt_select_itemContext;
+import org.apache.doris.nereids.PLParser.Assignment_stmt_single_itemContext;
+import org.apache.doris.nereids.PLParser.Associate_locator_stmtContext;
+import org.apache.doris.nereids.PLParser.Begin_end_blockContext;
+import org.apache.doris.nereids.PLParser.Bool_exprContext;
+import org.apache.doris.nereids.PLParser.Bool_expr_binaryContext;
+import org.apache.doris.nereids.PLParser.Bool_expr_unaryContext;
+import org.apache.doris.nereids.PLParser.Bool_literalContext;
+import org.apache.doris.nereids.PLParser.Break_stmtContext;
+import org.apache.doris.nereids.PLParser.Call_stmtContext;
+import org.apache.doris.nereids.PLParser.Close_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_function_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_package_body_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_package_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_procedure_stmtContext;
+import org.apache.doris.nereids.PLParser.Date_literalContext;
+import org.apache.doris.nereids.PLParser.Dec_numberContext;
+import org.apache.doris.nereids.PLParser.Declare_condition_itemContext;
+import org.apache.doris.nereids.PLParser.Declare_cursor_itemContext;
+import org.apache.doris.nereids.PLParser.Declare_handler_itemContext;
+import org.apache.doris.nereids.PLParser.Declare_var_itemContext;
+import org.apache.doris.nereids.PLParser.Doris_statementContext;
+import org.apache.doris.nereids.PLParser.DtypeContext;
+import org.apache.doris.nereids.PLParser.Dtype_lenContext;
+import org.apache.doris.nereids.PLParser.Exception_block_itemContext;
+import org.apache.doris.nereids.PLParser.Exec_stmtContext;
+import org.apache.doris.nereids.PLParser.Exit_stmtContext;
+import org.apache.doris.nereids.PLParser.ExprContext;
+import org.apache.doris.nereids.PLParser.Expr_agg_window_funcContext;
+import org.apache.doris.nereids.PLParser.Expr_case_searchedContext;
+import org.apache.doris.nereids.PLParser.Expr_case_simpleContext;
+import org.apache.doris.nereids.PLParser.Expr_concatContext;
+import org.apache.doris.nereids.PLParser.Expr_cursor_attributeContext;
+import org.apache.doris.nereids.PLParser.Expr_dot_method_callContext;
+import org.apache.doris.nereids.PLParser.Expr_dot_property_accessContext;
+import org.apache.doris.nereids.PLParser.Expr_funcContext;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParser.Expr_intervalContext;
+import org.apache.doris.nereids.PLParser.Expr_spec_funcContext;
+import org.apache.doris.nereids.PLParser.Expr_stmtContext;
+import org.apache.doris.nereids.PLParser.Fetch_stmtContext;
+import org.apache.doris.nereids.PLParser.For_cursor_stmtContext;
+import org.apache.doris.nereids.PLParser.For_range_stmtContext;
+import org.apache.doris.nereids.PLParser.Get_diag_stmt_exception_itemContext;
+import org.apache.doris.nereids.PLParser.Get_diag_stmt_rowcount_itemContext;
+import org.apache.doris.nereids.PLParser.Host_cmdContext;
+import org.apache.doris.nereids.PLParser.Host_stmtContext;
+import org.apache.doris.nereids.PLParser.Ident_plContext;
+import org.apache.doris.nereids.PLParser.If_bteq_stmtContext;
+import org.apache.doris.nereids.PLParser.If_plsql_stmtContext;
+import org.apache.doris.nereids.PLParser.If_tsql_stmtContext;
+import org.apache.doris.nereids.PLParser.Include_stmtContext;
+import org.apache.doris.nereids.PLParser.Int_numberContext;
+import org.apache.doris.nereids.PLParser.Label_stmtContext;
+import org.apache.doris.nereids.PLParser.Leave_stmtContext;
+import org.apache.doris.nereids.PLParser.Map_object_stmtContext;
+import org.apache.doris.nereids.PLParser.MultipartIdentifierContext;
+import org.apache.doris.nereids.PLParser.NamedExpressionSeqContext;
+import org.apache.doris.nereids.PLParser.Null_constContext;
+import org.apache.doris.nereids.PLParser.Open_stmtContext;
+import org.apache.doris.nereids.PLParser.Print_stmtContext;
+import org.apache.doris.nereids.PLParser.ProgramContext;
+import org.apache.doris.nereids.PLParser.QueryContext;
+import org.apache.doris.nereids.PLParser.Quit_stmtContext;
+import org.apache.doris.nereids.PLParser.Resignal_stmtContext;
+import org.apache.doris.nereids.PLParser.Return_stmtContext;
+import org.apache.doris.nereids.PLParser.Set_current_schema_optionContext;
+import org.apache.doris.nereids.PLParser.Set_doris_session_optionContext;
+import org.apache.doris.nereids.PLParser.Signal_stmtContext;
+import org.apache.doris.nereids.PLParser.StmtContext;
+import org.apache.doris.nereids.PLParser.StringContext;
+import org.apache.doris.nereids.PLParser.Timestamp_literalContext;
+import org.apache.doris.nereids.PLParser.Unconditional_loop_stmtContext;
+import org.apache.doris.nereids.PLParser.Values_into_stmtContext;
+import org.apache.doris.nereids.PLParser.While_stmtContext;
+import org.apache.doris.nereids.parser.CaseInsensitiveStream;
+import org.apache.doris.nereids.parser.ParserUtils;
+import org.apache.doris.nereids.parser.plsql.PLSqlLogicalPlanBuilder;
+import org.apache.doris.nereids.trees.expressions.NamedExpression;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
+import org.apache.doris.plsql.Var.Type;
+import org.apache.doris.plsql.exception.PlValidationException;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.plsql.exception.TypeException;
+import org.apache.doris.plsql.exception.UndefinedIdentException;
+import org.apache.doris.plsql.executor.JdbcQueryExecutor;
+import org.apache.doris.plsql.executor.Metadata;
+import org.apache.doris.plsql.executor.QueryExecutor;
+import org.apache.doris.plsql.executor.QueryResult;
+import org.apache.doris.plsql.executor.ResultListener;
+import org.apache.doris.plsql.functions.BuiltinFunctions;
+import org.apache.doris.plsql.functions.DorisFunctionRegistry;
+import org.apache.doris.plsql.functions.FunctionDatetime;
+import org.apache.doris.plsql.functions.FunctionMisc;
+import org.apache.doris.plsql.functions.FunctionRegistry;
+import org.apache.doris.plsql.functions.FunctionString;
+import org.apache.doris.plsql.functions.InMemoryFunctionRegistry;
+import org.apache.doris.plsql.metastore.PlsqlMetaClient;
+import org.apache.doris.plsql.objects.DbmOutput;
+import org.apache.doris.plsql.objects.DbmOutputClass;
+import org.apache.doris.plsql.objects.Method;
+import org.apache.doris.plsql.objects.MethodDictionary;
+import org.apache.doris.plsql.objects.MethodParams;
+import org.apache.doris.plsql.objects.PlObject;
+import org.apache.doris.plsql.objects.Table;
+import org.apache.doris.plsql.objects.TableClass;
+import org.apache.doris.plsql.objects.UtlFile;
+import org.apache.doris.plsql.objects.UtlFileClass;
+import org.apache.doris.plsql.packages.DorisPackageRegistry;
+import org.apache.doris.plsql.packages.InMemoryPackageRegistry;
+import org.apache.doris.plsql.packages.PackageRegistry;
+
+import com.google.common.collect.ImmutableList;
+import org.antlr.v4.runtime.ANTLRInputStream;
+import org.antlr.v4.runtime.CharStream;
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.ParserRuleContext;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.misc.NotNull;
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
+
+import java.io.ByteArrayInputStream;
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintWriter;
+import java.io.UncheckedIOException;
+import java.math.BigDecimal;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Stack;
+import java.util.stream.Collectors;
+
+/**
+ * PL/SQL script executor
+ */
+public class Exec extends org.apache.doris.nereids.PLParserBaseVisitor<Integer> implements Closeable {
+
+ public static final String VERSION = "PL/SQL 0.1";
+ public static final String ERRORCODE = "ERRORCODE";
+ public static final String SQLCODE = "SQLCODE";
+ public static final String SQLSTATE = "SQLSTATE";
+ public static final String HOSTCODE = "HOSTCODE";
+
+ Exec exec;
+ public FunctionRegistry functions;
+ private BuiltinFunctions builtinFunctions;
+ private PlsqlMetaClient client;
+ QueryExecutor queryExecutor;
+ private PackageRegistry packageRegistry = new InMemoryPackageRegistry();
+ private boolean packageLoading = false;
+ private final Map<String, TableClass> types = new HashMap<>();
+
+ public enum OnError {
+ EXCEPTION, SETERROR, STOP
+ }
+
+ // Scopes of execution (code blocks), each with its own local variables, parameters and exception handlers
+ Stack<Scope> scopes = new Stack<>();
+ Scope globalScope;
+ Scope currentScope;
+
+ Stack<Var> stack = new Stack<>();
+ Stack<String> labels = new Stack<>();
+ Stack<String> callStack = new Stack<>();
+
+ Stack<Signal> signals = new Stack<>();
+ Signal currentSignal;
+ Scope currentHandlerScope;
+ boolean resignal = false;
+
+ HashMap<String, String> managedTables = new HashMap<>();
+ HashMap<String, String> objectMap = new HashMap<>();
+ HashMap<String, String> objectConnMap = new HashMap<>();
+ HashMap<String, ArrayList<Var>> returnCursors = new HashMap<>();
+ HashMap<String, Package> packages = new HashMap<>();
+
+ Package currentPackageDecl = null;
+ Arguments arguments = new Arguments();
+ public Conf conf;
+ Expression expr;
+ Converter converter;
+ Meta meta;
+ Stmt stmt;
+ Conn conn;
+ Console console = Console.STANDARD;
+ ResultListener resultListener = ResultListener.NONE;
+
+ int rowCount = 0;
+
+ StringBuilder localUdf = new StringBuilder();
+ boolean initRoutines = false;
+ public boolean inCallStmt = false;
+ boolean udfRegistered = false;
+ boolean udfRun = false;
+
+ boolean dotPlsqlrcExists = false;
+ boolean plsqlrcExists = false;
+
+ boolean trace = false;
+ boolean info = true;
+ boolean offline = false;
+ public PLSqlLogicalPlanBuilder logicalPlanBuilder;
+
+ public Exec() {
+ exec = this;
+ queryExecutor = new JdbcQueryExecutor(this); // used by pl-sql.sh
+ }
+
+ public Exec(Conf conf, Console console, QueryExecutor queryExecutor, ResultListener resultListener) {
+ this.conf = conf;
+ this.exec = this;
+ this.console = console;
+ this.queryExecutor = queryExecutor;
+ this.resultListener = resultListener;
+ this.client = new PlsqlMetaClient();
+ }
+
+ Exec(Exec exec) {
+ this.exec = exec;
+ this.console = exec.console;
+ this.queryExecutor = exec.queryExecutor;
+ this.client = exec.client;
+ }
+
+ /**
+ * Set a variable using a value from the parameter or the stack
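+ * If value is null or Var.Empty the value is popped from the stack; names starting with "plsql." update the configuration instead.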
+ */
+ public Var setVariable(String name, Var value) {
+ if (value == null || value == Var.Empty) {
+ if (exec.stack.empty()) {
+ return Var.Empty;
+ }
+ value = exec.stack.pop();
+ }
+ if (name.startsWith("plsql.")) {
+ exec.conf.setOption(name, value.toString());
+ return Var.Empty;
+ }
+ Var var = findVariable(name);
+ if (var != null) {
+ var.cast(value);
+ } else {
+ var = new Var(value);
+ var.setName(name);
+ if (exec.currentScope != null) {
+ exec.currentScope.addVariable(var);
+ }
+ }
+ return var;
+ }
+
+ public Var setVariable(String name) {
+ return setVariable(name, Var.Empty);
+ }
+
+ public Var setVariable(String name, String value) {
+ return setVariable(name, new Var(value));
+ }
+
+ public Var setVariable(String name, int value) {
+ return setVariable(name, new Var(Long.valueOf(value)));
+ }
+
+ /**
+ * Set variable to NULL
+ */
+ public Var setVariableToNull(String name) {
+ Var var = findVariable(name);
+ if (var != null) {
+ var.removeValue();
+ } else {
+ var = new Var();
+ var.setName(name);
+ if (exec.currentScope != null) {
+ exec.currentScope.addVariable(var);
+ }
+ }
+ return var;
+ }
+
+ /**
+ * Add a local variable to the current scope
+ */
+ public void addVariable(Var var) {
+ if (currentPackageDecl != null) {
+ currentPackageDecl.addVariable(var);
+ } else if (exec.currentScope != null) {
+ exec.currentScope.addVariable(var);
+ }
+ }
+
+ /**
+ * Add a condition handler to the current scope
+ */
+ public void addHandler(Handler handler) {
+ if (exec.currentScope != null) {
+ exec.currentScope.addHandler(handler);
+ }
+ }
+
+ /**
+ * Add a return cursor visible to procedure callers and clients
+ */
+ public void addReturnCursor(Var var) {
+ String routine = callStackPeek();
+ ArrayList<Var> cursors = returnCursors.computeIfAbsent(routine, k -> new ArrayList<>());
+ cursors.add(var);
+ }
+
+ /**
+ * Get the return cursor defined in the specified procedure
+ */
+ public Var consumeReturnCursor(String routine) {
+ ArrayList<Var> cursors = returnCursors.get(routine.toUpperCase());
+ if (cursors == null) {
+ return null;
+ }
+ Var var = cursors.get(0);
+ cursors.remove(0);
+ return var;
+ }
+
+ /**
+ * Push a value to the stack
+ */
+ public void stackPush(Var var) {
+ exec.stack.push(var);
+ }
+
+ /**
+ * Push a string value to the stack
+ */
+ public void stackPush(String val) {
+ exec.stack.push(new Var(val));
+ }
+
+ public void stackPush(StringBuilder val) {
+ stackPush(val.toString());
+ }
+
+ /**
+ * Push a boolean value to the stack
+ */
+ public void stackPush(Boolean val) {
+ exec.stack.push(new Var(val));
+ }
+
+ /**
+ * Peek at the top value of the stack without removing it
+ */
+ public Var stackPeek() {
+ return exec.stack.peek();
+ }
+
+ /**
+ * Pop a value from the stack
+ */
+ public Var stackPop() {
+ if (!exec.stack.isEmpty()) {
+ return exec.stack.pop();
+ }
+ return Var.Empty;
+ }
+
+ /**
+ * Push a value to the call stack
+ */
+ public void callStackPush(String val) {
+ exec.callStack.push(val.toUpperCase());
+ }
+
+ /**
+ * Peek at the top value of the call stack without removing it
+ */
+ public String callStackPeek() {
+ if (!exec.callStack.isEmpty()) {
+ return exec.callStack.peek();
+ }
+ return null;
+ }
+
+ /**
+ * Pop a value from the call stack
+ */
+ public String callStackPop() {
+ if (!exec.callStack.isEmpty()) {
+ return exec.callStack.pop();
+ }
+ return null;
+ }
+
+ /**
+ * Find an existing variable by name
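+ * Resolution order: package-qualified name (pkg.var), then the enclosing scopes up to the routine boundary, then the global scope.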
+ */
+ public Var findVariable(String name) {
+ Var var;
+ String name1 = name.toUpperCase();
+ String name1a = null;
+ String name2;
+ Scope cur = exec.currentScope;
+ Package pack;
+ Package packCallContext = exec.getPackageCallContext();
+ ArrayList<String> qualified = exec.meta.splitIdentifier(name);
+ if (qualified != null) {
+ name1 = qualified.get(0).toUpperCase();
+ name2 = qualified.get(1).toUpperCase();
+ pack = findPackage(name1);
+ if (pack != null) {
+ var = pack.findVariable(name2);
+ if (var != null) {
+ return var;
+ }
+ }
+ }
+ if (name1.startsWith(":")) {
+ name1a = name1.substring(1);
+ }
+ while (cur != null) {
+ var = findVariable(cur.vars, name1);
+ if (var == null && name1a != null) {
+ var = findVariable(cur.vars, name1a);
+ }
+ if (var == null && packCallContext != null) {
+ var = packCallContext.findVariable(name1);
+ }
+ if (var != null) {
+ return var;
+ }
+ if (cur.type == Scope.Type.ROUTINE) {
+ cur = exec.globalScope;
+ } else {
+ cur = cur.parent;
+ }
+ }
+ return null;
+ }
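+
+ // Resolution sketch (illustrative): a qualified name such as "pkg.x" is first
+ // looked up in the named package, then the scope chain is walked from the
+ // current scope upwards (checking the calling package context as well); a
+ // leading ":" in host-variable style is also tried without the colon. Assuming
+ // a package PKG with a public variable X and a local variable Y:
+ //
+ //   findVariable("pkg.x");   // resolved via findPackage("PKG").findVariable("X")
+ //   findVariable(":y");      // falls back to "Y" in the scope chain
+ //   findVariable("y");       // found in the current or an enclosing scope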
+
+ public Var findVariable(Var name) {
+ return findVariable(name.getName());
+ }
+
+ Var findVariable(Map<String, Var> vars, String name) {
+ return vars.get(name.toUpperCase());
+ }
+
+ /**
+ * Find a cursor variable by name
+ */
+ public Var findCursor(String name) {
+ Var cursor = exec.findVariable(name);
+ if (cursor != null && cursor.type == Type.CURSOR) {
+ return cursor;
+ }
+ return null;
+ }
+
+ /**
+ * Find the package by name
+ */
+ Package findPackage(String name) {
+ Package pkg = packages.get(name.toUpperCase());
+ if (pkg != null) {
+ return pkg;
+ }
+ Optional<String> source = exec.packageRegistry.getPackage(name);
+ if (source.isPresent()) {
+ PLLexer lexer = new PLLexer(new CaseInsensitiveStream(CharStreams.fromString(source.get())));
+ CommonTokenStream tokens = new CommonTokenStream(lexer);
+ PLParser parser = newParser(tokens);
+ exec.packageLoading = true;
+ try {
+ visit(parser.program());
+ } finally {
+ exec.packageLoading = false;
+ }
+ } else {
+ return null;
+ }
+ return packages.get(name.toUpperCase());
+ }
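+
+ // Lazy loading note: if the package is not in memory, its stored source is read
+ // from the package registry, re-parsed, and visited with packageLoading set so
+ // that only the declarations are registered; the second map lookup then returns
+ // the freshly loaded Package, or null if the source did not declare it.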
+
+ /**
+ * Enter a new scope
+ */
+ public void enterScope(Scope scope) {
+ exec.scopes.push(scope);
+ }
+
+ public void enterScope(Scope.Type type) {
+ enterScope(type, null);
+ }
+
+ public void enterScope(Scope.Type type, Package pack) {
+ exec.currentScope = new Scope(exec.currentScope, type, pack);
+ enterScope(exec.currentScope);
+ }
+
+ public void enterGlobalScope() {
+ globalScope = new Scope(Scope.Type.GLOBAL);
+ currentScope = globalScope;
+ enterScope(globalScope);
+ }
+
+ /**
+ * Leave the current scope
+ */
+ public void leaveScope() {
+ if (!exec.signals.empty()) {
+ Scope scope = exec.scopes.peek();
+ Signal signal = exec.signals.peek();
+ if (exec.conf.onError != OnError.SETERROR) {
+ runExitHandler();
+ }
+ if (signal.type == Signal.Type.LEAVE_ROUTINE && scope.type == Scope.Type.ROUTINE) {
+ exec.signals.pop();
+ }
+ }
+ exec.currentScope = exec.scopes.pop().getParent();
+ }
+
+ /**
+ * Send a signal
+ */
+ public void signal(Signal signal) {
+ exec.signals.push(signal);
+ }
+
+ public void signal(Signal.Type type, String value, Exception exception) {
+ signal(new Signal(type, value, exception));
+ }
+
+ public void signal(Signal.Type type, String value) {
+ setSqlCode(SqlCodes.ERROR);
+ signal(type, value, null);
+ }
+
+ public void signal(Signal.Type type) {
+ setSqlCode(SqlCodes.ERROR);
+ signal(type, null, null);
+ }
+
+ public void signal(Query query) {
+ setSqlCode(query.getException());
+ signal(Signal.Type.SQLEXCEPTION, query.errorText(), query.getException());
+ }
+
+ public void signal(QueryResult query) {
+ setSqlCode(query.exception());
+ signal(Signal.Type.SQLEXCEPTION, query.errorText(), query.exception());
+ }
+
+ public void signal(Exception exception) {
+ setSqlCode(exception);
+ signal(Signal.Type.SQLEXCEPTION, exception.getMessage(), exception);
+ }
+
+ /**
+ * Resignal the condition
+ */
+ public void resignal() {
+ resignal(exec.currentSignal);
+ }
+
+ public void resignal(Signal signal) {
+ if (signal != null) {
+ exec.resignal = true;
+ signal(signal);
+ }
+ }
+
+ /**
+ * Run CONTINUE handlers
+ */
+ boolean runContinueHandler() {
+ Scope cur = exec.currentScope;
+ exec.currentSignal = exec.signals.pop();
+ while (cur != null) {
+ for (Handler h : cur.handlers) {
+ if (h.execType != Handler.ExecType.CONTINUE) {
+ continue;
+ }
+ if ((h.type != Signal.Type.USERDEFINED && h.type == exec.currentSignal.type)
+ || (h.type == Signal.Type.USERDEFINED && h.type == exec.currentSignal.type
+ && h.value.equalsIgnoreCase(exec.currentSignal.value))) {
+ trace(h.ctx, "CONTINUE HANDLER");
+ enterScope(Scope.Type.HANDLER);
+ exec.currentHandlerScope = h.scope;
+ visit(h.ctx.single_block_stmt());
+ leaveScope();
+ exec.currentSignal = null;
+ return true;
+ }
+ }
+ cur = cur.parent;
+ }
+ exec.signals.push(exec.currentSignal);
+ exec.currentSignal = null;
+ return false;
+ }
+
+ /**
+ * Run EXIT handler defined for the current scope
+ */
+ boolean runExitHandler() {
+ exec.currentSignal = exec.signals.pop();
+ for (Handler h : currentScope.handlers) {
+ if (h.execType != Handler.ExecType.EXIT) {
+ continue;
+ }
+ if ((h.type != Signal.Type.USERDEFINED && h.type == exec.currentSignal.type)
+ || (h.type == Signal.Type.USERDEFINED && h.type == exec.currentSignal.type
+ && h.value.equalsIgnoreCase(currentSignal.value))) {
+ trace(h.ctx, "EXIT HANDLER");
+ enterScope(Scope.Type.HANDLER);
+ exec.currentHandlerScope = h.scope;
+ visit(h.ctx.single_block_stmt());
+ leaveScope();
+ exec.currentSignal = null;
+ return true;
+ }
+ }
+ exec.signals.push(exec.currentSignal);
+ exec.currentSignal = null;
+ return false;
+ }
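+
+ // Handler matching sketch: both runners pop the current signal, search for a
+ // handler of the same signal type (and, for user-defined conditions, the same
+ // condition name), execute its block in a HANDLER scope, and push the signal
+ // back only when nothing matched. For example (illustrative syntax):
+ //
+ //   DECLARE CONTINUE HANDLER FOR SQLEXCEPTION SET err_flag = 1;
+ //   -- a later failing statement sets err_flag and execution continues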
+
+ /**
+ * Pop the last signal
+ */
+ public Signal signalPop() {
+ if (!exec.signals.empty()) {
+ return exec.signals.pop();
+ }
+ return null;
+ }
+
+ /**
+ * Peek the last signal
+ */
+ public Signal signalPeek() {
+ if (!exec.signals.empty()) {
+ return exec.signals.peek();
+ }
+ return null;
+ }
+
+ /**
+ * Pop the current label
+ */
+ public String labelPop() {
+ if (!exec.labels.empty()) {
+ return exec.labels.pop();
+ }
+ return "";
+ }
+
+ /**
+ * Execute a SQL query (SELECT)
+ */
+ public Query executeQuery(ParserRuleContext ctx, Query query, String connProfile) {
+ if (!exec.offline) {
+ exec.rowCount = 0;
+ exec.conn.executeQuery(query, connProfile);
+ return query;
+ }
+ setSqlNoData();
+ info(ctx, "Not executed - offline mode set");
+ return query;
+ }
+
+ /**
+ * Register JARs, FILEs and CREATE TEMPORARY FUNCTION for UDF call
+ */
+ public void registerUdf() {
+ if (udfRegistered) {
+ return;
+ }
+ ArrayList<String> sql = new ArrayList<>();
+ String dir = Utils.getExecDir();
+ String plsqlJarName = "plsql.jar";
+ for (String jarName : Objects.requireNonNull(new File(dir).list())) {
+ if (jarName.startsWith("doris-plsql") && jarName.endsWith(".jar")) {
+ plsqlJarName = jarName;
+ break;
+ }
+ }
+ sql.add("ADD JAR " + dir + plsqlJarName);
+ sql.add("ADD JAR " + dir + "antlr4-runtime-4.5.jar");
+ if (dotPlsqlrcExists) {
+ sql.add("ADD FILE " + dir + Conf.DOT_PLSQLRC);
+ }
+ if (plsqlrcExists) {
+ sql.add("ADD FILE " + dir + Conf.PLSQLRC);
+ }
+ String lu = createLocalUdf();
+ if (lu != null) {
+ sql.add("ADD FILE " + lu);
+ }
+ sql.add("CREATE TEMPORARY FUNCTION plsql AS 'org.apache.doris.udf.plsql.Udf'");
+ exec.conn.addPreSql(exec.conf.defaultConnection, sql);
+ udfRegistered = true;
+ }
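+
+ // Sketch of the pre-SQL queued for the default connection (directory, jar
+ // version and the locals file name are illustrative placeholders):
+ //
+ //   ADD JAR <exec-dir>/doris-plsql-<version>.jar
+ //   ADD JAR <exec-dir>/antlr4-runtime-4.5.jar
+ //   ADD FILE <exec-dir>/<rc-file>          -- only when an rc file was found
+ //   ADD FILE <user-dir>/<locals-file>      -- only when local UDFs were collected
+ //   CREATE TEMPORARY FUNCTION plsql AS 'org.apache.doris.udf.plsql.Udf'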
+
+ /**
+ * Initialize options
+ */
+ void initOptions() {
+ for (Entry<String, String> item : exec.conf) {
+ String key = item.getKey();
+ String value = item.getValue();
+ if (key == null || value == null || !key.startsWith("plsql.")) {
+ continue;
+ } else if (key.compareToIgnoreCase(Conf.CONN_DEFAULT) == 0) {
+ exec.conf.defaultConnection = value;
+ } else if (key.startsWith("plsql.conn.init.")) {
+ exec.conn.addConnectionInit(key.substring(17), value);
+ } else if (key.startsWith(Conf.CONN_CONVERT)) {
+ exec.conf.setConnectionConvert(key.substring(20), value);
+ } else if (key.startsWith("plsql.conn.")) {
+ String name = key.substring(12);
+ exec.conn.addConnection(name, value);
+ } else if (key.startsWith("plsql.")) {
+ exec.conf.setOption(key, value);
+ }
+ }
+ }
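+
+ // Option keys handled above (the exact constant values of Conf.CONN_DEFAULT and
+ // Conf.CONN_CONVERT are assumptions based on the prefixes checked here):
+ //
+ //   plsql.conn.default        -> default connection profile
+ //   plsql.conn.init.<name>    -> SQL to run when connection <name> is opened
+ //   plsql.conn.convert.<name> -> enable on-the-fly SQL conversion for <name>
+ //   plsql.conn.<name>         -> connection string for profile <name>
+ //   plsql.<other>             -> any remaining option, stored via Conf.setOption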
+
+ /**
+ * Set SQLCODE
+ */
+ public void setSqlCode(int sqlcode) {
+ Long code = (long) sqlcode;
+ Var var = findVariable(SQLCODE);
+ if (var != null) {
+ var.setValue(code);
+ }
+ var = findVariable(ERRORCODE);
+ if (var != null) {
+ var.setValue(code);
+ }
+ }
+
+ public void setSqlCode(Exception exception) {
+ if (exception instanceof QueryException) {
+ setSqlCode(((QueryException) exception).getErrorCode());
+ setSqlState(((QueryException) exception).getSQLState());
+ } else {
+ setSqlCode(SqlCodes.ERROR);
+ setSqlState("02000");
+ }
+ }
+
+ /**
+ * Set SQLSTATE
+ */
+ public void setSqlState(String sqlstate) {
+ Var var = findVariable(SQLSTATE);
+ if (var != null) {
+ var.setValue(sqlstate);
+ }
+ }
+
+ public void setResultListener(ResultListener resultListener) {
+ stmt.setResultListener(resultListener);
+ }
+
+ /**
+ * Set HOSTCODE
+ */
+ public void setHostCode(int code) {
+ Var var = findVariable(HOSTCODE);
+ if (var != null) {
+ var.setValue(Long.valueOf(code));
+ }
+ }
+
+ /**
+ * Set successful execution for SQL
+ */
+ public void setSqlSuccess() {
+ setSqlCode(SqlCodes.SUCCESS);
+ setSqlState("00000");
+ }
+
+ /**
+ * Set SQL_NO_DATA as the result of SQL execution
+ */
+ public void setSqlNoData() {
+ setSqlCode(SqlCodes.NO_DATA_FOUND);
+ setSqlState("01000");
+ }
+
+ public Integer run(String[] args) throws Exception {
+ if (!parseArguments(args)) {
+ return -1;
+ }
+ init();
+ try {
+ parseAndEval(arguments);
+ } finally {
+ close();
+ }
+ return getProgramReturnCode();
+ }
+
+ public Var parseAndEval(Arguments arguments) throws IOException {
+ ParseTree tree;
+ try {
+ CharStream input = sourceStream(arguments);
+ tree = parse(input);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ Var result = null;
+ try {
+ result = evaluate(tree, arguments.main);
+ } catch (PlValidationException e) {
+ signal(Signal.Type.VALIDATION, e.getMessage(), e);
+ }
+ if (result != null) {
+ console.printLine(result.toString());
+ }
+ return result;
+ }
+
+ private Var evaluate(ParseTree tree, String execMain) {
+ if (tree == null) {
+ return null;
+ }
+ if (execMain != null) {
+ initRoutines = true;
+ visit(tree);
+ initRoutines = false;
+ exec.functions.exec(new FuncNameInfo(execMain), null);
+ } else {
+ visit(tree);
+ }
+ if (!exec.stack.isEmpty()) {
+ return exec.stackPop();
+ }
+ return null;
+ }
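+
+ // Behaviour sketch: when a main routine name is supplied, the tree is first
+ // visited with initRoutines=true so that only CREATE PROCEDURE/FUNCTION
+ // statements register themselves, and then the named routine is invoked;
+ // otherwise the whole tree runs top to bottom and the value left on the stack,
+ // if any, becomes the result returned to the caller.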
+
+ @Override
+ public void close() {
+ leaveScope();
+ cleanup();
+ printExceptions();
+ }
+
+ private CharStream sourceStream(Arguments arguments) throws IOException {
+ return arguments.execString != null
+ ? CharStreams.fromString(arguments.execString)
+ : CharStreams.fromFileName(arguments.fileName);
+ }
+
+ /**
+ * Initialize PL/SQL
+ */
+ public void init() {
+ enterGlobalScope();
+ // specify the default log4j2 properties file.
+ System.setProperty("log4j.configurationFile", "hive-log4j2.properties");
+ if (conf == null) {
+ conf = new Conf();
+ }
+ conf.init();
+ conn = new Conn(this);
+ meta = new Meta(this, queryExecutor);
+ initOptions();
+ logicalPlanBuilder = new PLSqlLogicalPlanBuilder();
+
+ expr = new Expression(this);
+ stmt = new Stmt(this, queryExecutor);
+ stmt.setResultListener(resultListener);
+ converter = new Converter(this);
+
+ builtinFunctions = new BuiltinFunctions(this, queryExecutor);
+ new FunctionDatetime(this, queryExecutor).register(builtinFunctions);
+ new FunctionMisc(this, queryExecutor).register(builtinFunctions);
+ new FunctionString(this, queryExecutor).register(builtinFunctions);
+ if (client != null) {
+ functions = new DorisFunctionRegistry(this, client, builtinFunctions);
+ packageRegistry = new DorisPackageRegistry(client);
+ } else {
+ functions = new InMemoryFunctionRegistry(this, builtinFunctions);
+ }
+ addVariable(new Var(ERRORCODE, Var.Type.BIGINT, 0L));
+ addVariable(new Var(SQLCODE, Var.Type.BIGINT, 0L));
+ addVariable(new Var(SQLSTATE, Var.Type.STRING, "00000"));
+ addVariable(new Var(HOSTCODE, Var.Type.BIGINT, 0L));
+ for (Map.Entry<String, String> v : arguments.getVars().entrySet()) {
+ addVariable(new Var(v.getKey(), Var.Type.STRING, v.getValue()));
+ }
+ includeRcFile();
+ registerBuiltins();
+ }
+
+ private ParseTree parse(CharStream input) throws IOException {
+ PLLexer lexer = new PLLexer(new CaseInsensitiveStream(input));
+ CommonTokenStream tokens = new CommonTokenStream(lexer);
+ PLParser parser = newParser(tokens);
+ ParseTree tree = parser.program();
+ if (trace) {
+ console.printError("Parser tree: " + tree.toStringTree(parser));
+ }
+ return tree;
+ }
+
+ protected void registerBuiltins() {
+ Var dbmVar = new Var(Type.PL_OBJECT, "DBMS_OUTPUT");
+ DbmOutput dbms = DbmOutputClass.INSTANCE.newInstance();
+ dbms.initialize(console);
+ dbmVar.setValue(dbms);
+ dbmVar.setConstant(true);
+ addVariable(dbmVar);
+
+ Var utlFileVar = new Var(Type.PL_OBJECT, "UTL_FILE");
+ UtlFile utlFile = UtlFileClass.INSTANCE.newInstance();
+ utlFileVar.setValue(utlFile);
+ utlFileVar.setConstant(true);
+ addVariable(utlFileVar);
+ }
+
+ private PLParser newParser(CommonTokenStream tokens) {
+ PLParser parser = new PLParser(tokens);
+ // the default listener logs to stdout; replace it with a custom listener that uses the beeline console
+ parser.removeErrorListeners();
+ parser.addErrorListener(new SyntaxErrorReporter(console));
+ return parser;
+ }
+
+ /**
+ * Parse command line arguments
+ */
+ boolean parseArguments(String[] args) {
+ boolean parsed = arguments.parse(args);
+ if (parsed && arguments.hasVersionOption()) {
+ console.printError(VERSION);
+ return false;
+ }
+ if (!parsed || arguments.hasHelpOption()
+ || (arguments.getExecString() == null && arguments.getFileName() == null)) {
+ arguments.printHelp();
+ return false;
+ }
+ String execString = arguments.getExecString();
+ String execFile = arguments.getFileName();
+ if (arguments.hasTraceOption()) {
+ trace = true;
+ }
+ if (arguments.hasOfflineOption()) {
+ offline = true;
+ }
+ if (execString != null && execFile != null) {
+ console.printError("The '-e' and '-f' options cannot be specified simultaneously.");
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Include statements from the .plsqlrc and plsqlrc files
+ */
+ void includeRcFile() {
+ if (includeFile(Conf.DOT_PLSQLRC, false)) {
+ dotPlsqlrcExists = true;
+ } else {
+ if (includeFile(Conf.PLSQLRC, false)) {
+ plsqlrcExists = true;
+ }
+ }
+ if (udfRun) {
+ includeFile(Conf.PLSQL_LOCALS_SQL, true);
+ }
+ }
+
+ /**
+ * Include statements from a file
+ */
+ boolean includeFile(String file, boolean showError) {
+ try {
+ String content = FileUtils.readFileToString(new java.io.File(file), "UTF-8");
+ if (content != null && !content.isEmpty()) {
+ if (trace) {
+ trace(null, "INCLUDE CONTENT " + file + " (non-empty)");
+ }
+ new Exec(this).include(content);
+ return true;
+ }
+ } catch (Exception e) {
+ if (showError) {
+ error(null, "INCLUDE file error: " + e.getMessage());
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Execute statements from an include file
+ */
+ void include(String content) throws Exception {
+ InputStream input = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
+ PLLexer lexer = new PLLexer(new ANTLRInputStream(input));
+ CommonTokenStream tokens = new CommonTokenStream(lexer);
+ PLParser parser = newParser(tokens);
+ ParseTree tree = parser.program();
+ visit(tree);
+ }
+
+ /**
+ * Start executing the PL/SQL script
+ */
+ @Override
+ public Integer visitProgram(ProgramContext ctx) {
+ return visitChildren(ctx);
+ }
+
+ /**
+ * Enter BEGIN-END block
+ */
+ @Override
+ public Integer visitBegin_end_block(Begin_end_blockContext ctx) {
+ enterScope(Scope.Type.BEGIN_END);
+ Integer rc = visitChildren(ctx);
+ leaveScope();
+ return rc;
+ }
+
+ /**
+ * Free resources before exit
+ */
+ void cleanup() {
+ for (Map.Entry<String, String> i : managedTables.entrySet()) {
+ String sql = "DROP TABLE IF EXISTS " + i.getValue();
+ QueryResult query = queryExecutor.executeQuery(sql, null);
+ query.close();
+ if (trace) {
+ trace(null, sql);
+ }
+ }
+ }
+
+ /**
+ * Output information about unhandled exceptions
+ */
+ public void printExceptions() {
+ while (!signals.empty()) {
+ Signal sig = signals.pop();
+ if (sig.type == Signal.Type.VALIDATION) {
+ error(((PlValidationException) sig.exception).getCtx(), sig.exception.getMessage());
+ } else if (sig.type == Signal.Type.SQLEXCEPTION) {
+ console.printError("Unhandled exception in PL/SQL. " + ExceptionUtils.getStackTrace(sig.exception));
+ } else if (sig.type == Signal.Type.UNSUPPORTED_OPERATION) {
+ console.printError(sig.value == null ? "Unsupported operation" : sig.value);
+ } else if (sig.exception != null) {
+ console.printError("HPL/SQL error: " + ExceptionUtils.getStackTrace(sig.exception));
+ } else if (sig.value != null) {
+ console.printError(sig.value);
+ } else {
+ trace(null, "Signal: " + sig.type);
+ }
+ }
+ }
+
+ /**
+ * Get the program return code
+ */
+ Integer getProgramReturnCode() {
+ int rc = 0;
+ if (!signals.empty()) {
+ Signal sig = signals.pop();
+ if ((sig.type == Signal.Type.LEAVE_PROGRAM || sig.type == Signal.Type.LEAVE_ROUTINE)
+ && sig.value != null) {
+ try {
+ rc = Integer.parseInt(sig.value);
+ } catch (NumberFormatException e) {
+ rc = 1;
+ }
+ }
+ }
+ return rc;
+ }
+
+ /**
+ * Executing a statement
+ */
+ @Override
+ public Integer visitStmt(StmtContext ctx) {
+ if (ctx.semicolon_stmt() != null) {
+ return 0;
+ }
+ if (initRoutines && ctx.create_procedure_stmt() == null && ctx.create_function_stmt() == null) {
+ return 0;
+ }
+ if (exec.resignal) {
+ if (exec.currentScope != exec.currentHandlerScope.parent) {
+ return 0;
+ }
+ exec.resignal = false;
+ }
+ if (!exec.signals.empty() && exec.conf.onError != OnError.SETERROR) {
+ if (!runContinueHandler()) {
+ return 0;
+ }
+ }
+ Var prev = stackPop();
+ if (prev != null && prev.value != null) {
+ console.printLine(prev.toString());
+ }
+ return visitChildren(ctx);
+ }
+
+ @Override
+ public Integer visitDoris_statement(Doris_statementContext ctx) {
+ Integer rc = exec.stmt.statement(ctx);
+ if (rc != 0) {
+ printExceptions();
+ throw new RuntimeException(exec.signalPeek().getValue());
+ }
+ // Sometimes the query results are not returned to the mysql client,
+ // such as declare result; select … into result;
+ resultListener.onFinalize();
+ console.flushConsole(); // if running from plsql.sh
+ return rc;
+ }
+
+ /**
+ * Executing SELECT statement
+ */
+ @Override
+ public Integer visitQuery(QueryContext ctx) {
+ return exec.stmt.statement(ctx);
+ }
+
+ /**
+ * EXCEPTION block
+ */
+ @Override
+ public Integer visitException_block_item(Exception_block_itemContext ctx) {
+ if (exec.signals.empty()) {
+ return 0;
+ }
+ if (exec.conf.onError == OnError.SETERROR || exec.conf.onError == OnError.STOP) {
+ exec.signals.pop();
+ return 0;
+ }
+ if (ctx.IDENTIFIER().toString().equalsIgnoreCase("OTHERS")) {
+ trace(ctx, "EXCEPTION HANDLER");
+ exec.signals.pop();
+ enterScope(Scope.Type.HANDLER);
+ visit(ctx.block());
+ leaveScope();
+ }
+ return 0;
+ }
+
+ private <T> List<T> visit(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
+ return contexts.stream()
+ .map(this::visit)
+ .map(clazz::cast)
+ .collect(ImmutableList.toImmutableList());
+ }
+
+ public List<NamedExpression> getNamedExpressions(NamedExpressionSeqContext namedCtx) {
+ return ParserUtils.withOrigin(namedCtx, () -> visit(namedCtx.namedExpression(), NamedExpression.class));
+ }
+
+ /**
+ * DECLARE variable statement
+ */
+ @Override
+ public Integer visitDeclare_var_item(Declare_var_itemContext ctx) {
+ String type = null;
+ TableClass userDefinedType = null;
+ Row row = null;
+ String len = null;
+ String scale = null;
+ Var defaultVar = null;
+ if (ctx.dtype().ROWTYPE() != null) {
+ row = meta.getRowDataType(ctx, exec.conf.defaultConnection, ctx.dtype().qident().getText());
+ if (row == null) {
+ type = Var.DERIVED_ROWTYPE;
+ }
+ } else {
+ type = getDataType(ctx);
+ if (ctx.dtype_len() != null) {
+ len = ctx.dtype_len().INTEGER_VALUE(0).getText();
+ if (ctx.dtype_len().INTEGER_VALUE(1) != null) {
+ scale = ctx.dtype_len().INTEGER_VALUE(1).getText();
+ }
+ }
+ if (ctx.dtype_default() != null) {
+ defaultVar = evalPop(ctx.dtype_default());
+ }
+ userDefinedType = types.get(type);
+ if (userDefinedType != null) {
+ type = Type.PL_OBJECT.name();
+ }
+
+ }
+ int cnt = ctx.ident_pl().size(); // Number of variables declared with the same data type and default
+ for (int i = 0; i < cnt; i++) {
+ String name = ctx.ident_pl(i).getText();
+ if (row == null) {
+ Var var = new Var(name, type, len, scale, defaultVar);
+ if (userDefinedType != null && defaultVar == null) {
+ var.setValue(userDefinedType.newInstance());
+ }
+ exec.addVariable(var);
+ if (ctx.CONSTANT() != null) {
+ var.setConstant(true);
+ }
+ if (trace) {
+ if (defaultVar != null) {
+ trace(ctx, "DECLARE " + name + " " + type + " = " + var.toSqlString());
+ } else {
+ trace(ctx, "DECLARE " + name + " " + type);
+ }
+ }
+ } else {
+ exec.addVariable(new Var(name, row));
+ if (trace) {
+ trace(ctx, "DECLARE " + name + " " + ctx.dtype().getText());
+ }
+ }
+ }
+ return 0;
+ }
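+
+ // Example declarations handled by this visitor (illustrative):
+ //
+ //   DECLARE a, b INT DEFAULT 1;    -- two variables sharing type and default
+ //   DECLARE r some_table%ROWTYPE;  -- row variable built from table metadata
+ //
+ // With tracing enabled this produces lines such as
+ //   Ln:1 DECLARE a INT = 1
+ //   Ln:1 DECLARE b INT = 1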
+
+ /**
+ * Get the variable data type
+ */
+ String getDataType(Declare_var_itemContext ctx) {
+ String type;
+ if (ctx.dtype().TYPE() != null) {
+ type = meta.getDataType(ctx, exec.conf.defaultConnection, ctx.dtype().qident().getText());
+ if (type == null) {
+ type = Var.DERIVED_TYPE;
+ }
+ } else {
+ type = getFormattedText(ctx.dtype());
+ }
+ return type;
+ }
+
+ /**
+ * ALLOCATE CURSOR statement
+ */
+ @Override
+ public Integer visitAllocate_cursor_stmt(Allocate_cursor_stmtContext ctx) {
+ return exec.stmt.allocateCursor(ctx);
+ }
+
+ /**
+ * ASSOCIATE LOCATOR statement
+ */
+ @Override
+ public Integer visitAssociate_locator_stmt(Associate_locator_stmtContext ctx) {
+ return exec.stmt.associateLocator(ctx);
+ }
+
+ /**
+ * DECLARE cursor statement
+ */
+ @Override
+ public Integer visitDeclare_cursor_item(Declare_cursor_itemContext ctx) {
+ return exec.stmt.declareCursor(ctx);
+ }
+
+ /**
+ * OPEN cursor statement
+ */
+ @Override
+ public Integer visitOpen_stmt(Open_stmtContext ctx) {
+ return exec.stmt.open(ctx);
+ }
+
+ /**
+ * FETCH cursor statement
+ */
+ @Override
+ public Integer visitFetch_stmt(Fetch_stmtContext ctx) {
+ return exec.stmt.fetch(ctx);
+ }
+
+ /**
+ * CLOSE cursor statement
+ */
+ @Override
+ public Integer visitClose_stmt(Close_stmtContext ctx) {
+ return exec.stmt.close(ctx);
+ }
+
+ /**
+ * DECLARE HANDLER statement
+ */
+ @Override
+ public Integer visitDeclare_handler_item(Declare_handler_itemContext ctx) {
+ trace(ctx, "DECLARE HANDLER");
+ Handler.ExecType execType = Handler.ExecType.EXIT;
+ Signal.Type type = Signal.Type.SQLEXCEPTION;
+ String value = null;
+ if (ctx.CONTINUE() != null) {
+ execType = Handler.ExecType.CONTINUE;
+ }
+ if (ctx.ident_pl() != null) {
+ type = Signal.Type.USERDEFINED;
+ value = ctx.ident_pl().getText();
+ } else if (ctx.NOT() != null && ctx.FOUND() != null) {
+ type = Signal.Type.NOTFOUND;
+ }
+ addHandler(new Handler(execType, type, value, exec.currentScope, ctx));
+ return 0;
+ }
+
+ /**
+ * DECLARE CONDITION
+ */
+ @Override
+ public Integer visitDeclare_condition_item(Declare_condition_itemContext ctx) {
+ return 0;
+ }
+
+ /**
+ * CREATE FUNCTION statement
+ */
+ @Override
+ public Integer visitCreate_function_stmt(Create_function_stmtContext ctx) {
+ exec.functions.addUserFunction(ctx);
+ addLocalUdf(ctx);
+ return 0;
+ }
+
+ /**
+ * CREATE PACKAGE specification statement
+ */
+ @Override
+ public Integer visitCreate_package_stmt(Create_package_stmtContext ctx) {
+ FuncNameInfo procedureName = new FuncNameInfo(
+ exec.logicalPlanBuilder.visitMultipartIdentifier(ctx.multipartIdentifier()));
+ if (exec.packageLoading) {
+ exec.currentPackageDecl = new Package(procedureName.toString(), exec, builtinFunctions);
+ exec.packages.put(procedureName.toString(), exec.currentPackageDecl);
+ exec.currentPackageDecl.createSpecification(ctx);
+ exec.currentPackageDecl = null;
+ } else {
+ trace(ctx, "CREATE PACKAGE");
+ exec.packages.remove(procedureName.toString());
+ exec.packageRegistry.createPackageHeader(procedureName.toString(), getFormattedText(ctx),
+ ctx.REPLACE() != null);
+ }
+ return 0;
+ }
+
+ /**
+ * CREATE PACKAGE body statement
+ */
+ @Override
+ public Integer visitCreate_package_body_stmt(
+ Create_package_body_stmtContext ctx) {
+ FuncNameInfo procedureName = new FuncNameInfo(
+ exec.logicalPlanBuilder.visitMultipartIdentifier(ctx.multipartIdentifier()));
+ if (exec.packageLoading) {
+ exec.currentPackageDecl = exec.packages.get(procedureName.toString());
+ if (exec.currentPackageDecl == null) {
+ exec.currentPackageDecl = new Package(procedureName.toString(), exec, builtinFunctions);
+ exec.currentPackageDecl.setAllMembersPublic(true);
+ exec.packages.put(procedureName.toString(), exec.currentPackageDecl);
+ }
+ exec.currentPackageDecl.createBody(ctx);
+ exec.currentPackageDecl = null;
+ } else {
+ trace(ctx, "CREATE PACKAGE BODY");
+ exec.packages.remove(procedureName.toString());
+ exec.packageRegistry.createPackageBody(procedureName.toString(), getFormattedText(ctx),
+ ctx.REPLACE() != null);
+ }
+ return 0;
+ }
+
+ /**
+ * CREATE PROCEDURE statement
+ */
+ @Override
+ public Integer visitCreate_procedure_stmt(Create_procedure_stmtContext ctx) {
+ exec.functions.addUserProcedure(ctx);
+ addLocalUdf(ctx); // Add procedures as they can be invoked by functions
+ return 0;
+ }
+
+ /**
+ * Add functions and procedures defined in the current script
+ */
+ void addLocalUdf(ParserRuleContext ctx) {
+ if (exec == this) {
+ localUdf.append(Exec.getFormattedText(ctx));
+ localUdf.append("\n");
+ }
+ }
+
+ /**
+ * Save local functions and procedures to a file (will be added to the distributed cache)
+ */
+ String createLocalUdf() {
+ if (localUdf.length() == 0) {
+ return null;
+ }
+ try {
+ String file = System.getProperty("user.dir") + "/" + Conf.PLSQL_LOCALS_SQL;
+ PrintWriter writer = new PrintWriter(file, "UTF-8");
+ writer.print(localUdf);
+ writer.close();
+ return file;
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+
+ @Override
+ public Integer visitSet_doris_session_option(
+ Set_doris_session_optionContext ctx) {
+ StringBuilder sql = new StringBuilder("set ");
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ sql.append(ctx.getChild(i).getText()).append(" ");
+ }
+ QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx); // Send to doris for execution
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ if (trace) {
+ trace(ctx, sql.toString());
+ }
+ return 0;
+ }
+
+ /**
+ * Assignment statement for single value
+ */
+ @Override
+ public Integer visitAssignment_stmt_single_item(
+ Assignment_stmt_single_itemContext ctx) {
+ String name = ctx.ident_pl().getText();
+ visit(ctx.expr());
+ Var var = setVariable(name);
+ if (trace) {
+ trace(ctx, "SET " + name + " = " + var.toSqlString());
+ }
+ return 0;
+ }
+
+ /**
+ * Assignment statement for multiple values
+ */
+ @Override
+ public Integer visitAssignment_stmt_multiple_item(
+ Assignment_stmt_multiple_itemContext ctx) {
+ int cnt = ctx.ident_pl().size();
+ int ecnt = ctx.expr().size();
+ for (int i = 0; i < cnt; i++) {
+ String name = ctx.ident_pl(i).getText();
+ if (i < ecnt) {
+ visit(ctx.expr(i));
+ Var var = setVariable(name);
+ if (trace) {
+ trace(ctx, "SET " + name + " = " + var.toString());
+ }
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Assignment from SELECT statement
+ */
+ @Override
+ public Integer visitAssignment_stmt_select_item(
+ Assignment_stmt_select_itemContext ctx) {
+ return stmt.assignFromSelect(ctx);
+ }
+
+ @Override
+ public Integer visitAssignment_stmt_collection_item(
+ Assignment_stmt_collection_itemContext ctx) {
+ Expr_funcContext lhs = ctx.expr_func();
+ Var var = findVariable(lhs.multipartIdentifier().getText());
+ if (var == null || var.type != Type.PL_OBJECT) {
+ stackPush(Var.Null);
+ return 0;
+ }
+ MethodParams.Arity.UNARY.check(lhs.multipartIdentifier().getText(), lhs.expr_func_params().func_param());
+ Var index = evalPop(lhs.expr_func_params().func_param(0));
+ Var value = evalPop(ctx.expr());
+ dispatch(ctx, (PlObject) var.value, MethodDictionary.__SETITEM__, Arrays.asList(index, value));
+ return 0;
+ }
+
+ /**
+ * Evaluate an expression
+ */
+ @Override
+ public Integer visitExpr(ExprContext ctx) {
+ exec.expr.exec(ctx);
+ return 0;
+ }
+
+ /**
+ * Evaluate a boolean expression
+ */
+ @Override
+ public Integer visitBool_expr(Bool_exprContext ctx) {
+ exec.expr.execBool(ctx);
+ return 0;
+ }
+
+ @Override
+ public Integer visitBool_expr_binary(Bool_expr_binaryContext ctx) {
+ exec.expr.execBoolBinary(ctx);
+ return 0;
+ }
+
+ @Override
+ public Integer visitBool_expr_unary(Bool_expr_unaryContext ctx) {
+ exec.expr.execBoolUnary(ctx);
+ return 0;
+ }
+
+ /**
+ * Cursor attribute %ISOPEN, %FOUND and %NOTFOUND
+ */
+ @Override
+ public Integer visitExpr_cursor_attribute(Expr_cursor_attributeContext ctx) {
+ exec.expr.execCursorAttribute(ctx);
+ return 0;
+ }
+
+ /**
+ * Function call
+ */
+ @Override
+ public Integer visitExpr_func(Expr_funcContext ctx) {
+ return functionCall(ctx, ctx.multipartIdentifier(), ctx.expr_func_params());
+ }
+
+ private int functionCall(ParserRuleContext ctx, MultipartIdentifierContext ident,
+ Expr_func_paramsContext params) {
+ List<String> nameParts = logicalPlanBuilder.visitMultipartIdentifier(ident);
+ FuncNameInfo procedureName = new FuncNameInfo(nameParts);
+ Package packCallContext = exec.getPackageCallContext();
+ boolean executed = false;
+ Package pack = findPackage(procedureName.getDb());
+ if (pack != null) {
+ executed = pack.execFunc(procedureName.getName(), params);
+ }
+ if (!executed && packCallContext != null) {
+ executed = packCallContext.execFunc(procedureName.toString(), params);
+ }
+ if (!executed) {
+ if (!exec.functions.exec(procedureName, params)) {
+ Var var = findVariable(procedureName.toString());
+ if (var != null && var.type == Type.PL_OBJECT) {
+ stackPush(dispatch(ctx, (PlObject) var.value, MethodDictionary.__GETITEM__, params));
+ } else {
+ throw new UndefinedIdentException(ctx, procedureName.toString());
+ }
+ }
+ }
+ return 0;
+ }
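+
+ // Call resolution sketch: a qualified name is first tried against its own
+ // package, then against the package of the calling routine, then against the
+ // function registry (built-in and user functions); as a last resort a PL_OBJECT
+ // variable with that name is indexed through __GETITEM__, and otherwise an
+ // UndefinedIdentException is raised.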
+
+ private Var dispatch(ParserRuleContext ctx, PlObject obj, String methodName,
+ Expr_func_paramsContext paramCtx) {
+ List<Var> params = paramCtx == null
+ ? Collections.emptyList()
+ : paramCtx.func_param().stream().map(this::evalPop).collect(Collectors.toList());
+ return dispatch(ctx, obj, methodName, params);
+ }
+
+ private Var dispatch(ParserRuleContext ctx, PlObject obj, String methodName, List<Var> params) {
+ Method method = obj.plClass().methodDictionary().get(ctx, methodName);
+ return method.call(obj, params);
+ }
+
+ /**
+ * @return either 1 rowtype OR 1 single column table OR n single column tables
+ */
+ public List<Table> intoTables(ParserRuleContext ctx, List<String> names) {
+ List<Table> tables = new ArrayList<>();
+ for (String name : names) {
+ Var var = findVariable(name);
+ if (var == null) {
+ trace(ctx, "Variable not found: " + name);
+ } else if (var.type == Type.PL_OBJECT && var.value instanceof Table) {
+ tables.add((Table) var.value);
+ } else {
+ throw new TypeException(ctx, Table.class, var.type, var.value);
+ }
+ }
+ if (tables.size() > 1 && tables.stream().anyMatch(tbl -> tbl.plClass().rowType())) {
+ throw new TypeException(ctx, "rowtype table should not be used when selecting into multiple tables");
+ }
+ return tables;
+ }
+
+ /**
+ * Aggregate or window function call
+ */
+ @Override
+ public Integer visitExpr_agg_window_func(Expr_agg_window_funcContext ctx) {
+ exec.stackPush(Exec.getFormattedText(ctx));
+ return 0;
+ }
+
+ /**
+ * Function with specific syntax
+ */
+ @Override
+ public Integer visitExpr_spec_func(Expr_spec_funcContext ctx) {
+ exec.builtinFunctions.specExec(ctx);
+ return 0;
+ }
+
+ /**
+ * INCLUDE statement
+ */
+ @Override
+ public Integer visitInclude_stmt(@NotNull Include_stmtContext ctx) {
+ return exec.stmt.include(ctx);
+ }
+
+ /**
+ * IF statement (PL/SQL syntax)
+ */
+ @Override
+ public Integer visitIf_plsql_stmt(If_plsql_stmtContext ctx) {
+ return exec.stmt.ifPlsql(ctx);
+ }
+
+ /**
+ * IF statement (Transact-SQL syntax)
+ */
+ @Override
+ public Integer visitIf_tsql_stmt(If_tsql_stmtContext ctx) {
+ return exec.stmt.ifTsql(ctx);
+ }
+
+ /**
+ * IF statement (BTEQ syntax)
+ */
+ @Override
+ public Integer visitIf_bteq_stmt(If_bteq_stmtContext ctx) {
+ return exec.stmt.ifBteq(ctx);
+ }
+
+
+ /**
+ * VALUES statement
+ */
+ @Override
+ public Integer visitValues_into_stmt(Values_into_stmtContext ctx) {
+ return exec.stmt.values(ctx);
+ }
+
+ /**
+ * WHILE statement
+ */
+ @Override
+ public Integer visitWhile_stmt(While_stmtContext ctx) {
+ return exec.stmt.while_(ctx);
+ }
+
+ @Override
+ public Integer visitUnconditional_loop_stmt(
+ Unconditional_loop_stmtContext ctx) {
+ return exec.stmt.unconditionalLoop(ctx);
+ }
+
+ /**
+ * FOR cursor statement
+ */
+ @Override
+ public Integer visitFor_cursor_stmt(For_cursor_stmtContext ctx) {
+ return exec.stmt.forCursor(ctx);
+ }
+
+ /**
+ * FOR (integer range) statement
+ */
+ @Override
+ public Integer visitFor_range_stmt(For_range_stmtContext ctx) {
+ return exec.stmt.forRange(ctx);
+ }
+
+ /**
+ * EXEC, EXECUTE and EXECUTE IMMEDIATE statement to execute dynamic SQL
+ */
+ @Override
+ public Integer visitExec_stmt(Exec_stmtContext ctx) {
+ exec.inCallStmt = true;
+ Integer rc = exec.stmt.exec(ctx);
+ exec.inCallStmt = false;
+ return rc;
+ }
+
+ /**
+ * CALL statement
+ */
+ @Override
+ public Integer visitCall_stmt(Call_stmtContext ctx) {
+ exec.inCallStmt = true;
+ try {
+ if (ctx.expr_func() != null) {
+ functionCall(ctx, ctx.expr_func().multipartIdentifier(), ctx.expr_func().expr_func_params());
+ } else if (ctx.expr_dot() != null) {
+ visitExpr_dot(ctx.expr_dot());
+ } else if (ctx.multipartIdentifier() != null) {
+ functionCall(ctx, ctx.multipartIdentifier(), null);
+ }
+ } finally {
+ exec.inCallStmt = false;
+ }
+ return 0;
+ }
+
+ /**
+ * EXIT statement (leave the specified loop with a condition)
+ */
+ @Override
+ public Integer visitExit_stmt(Exit_stmtContext ctx) {
+ return exec.stmt.exit(ctx);
+ }
+
+ /**
+ * BREAK statement (leave the innermost loop unconditionally)
+ */
+ @Override
+ public Integer visitBreak_stmt(Break_stmtContext ctx) {
+ return exec.stmt.break_(ctx);
+ }
+
+ /**
+ * LEAVE statement (leave the specified loop unconditionally)
+ */
+ @Override
+ public Integer visitLeave_stmt(Leave_stmtContext ctx) {
+ return exec.stmt.leave(ctx);
+ }
+
+ /**
+ * PRINT statement
+ */
+ @Override
+ public Integer visitPrint_stmt(Print_stmtContext ctx) {
+ return exec.stmt.print(ctx);
+ }
+
+ /**
+ * QUIT statement
+ */
+ @Override
+ public Integer visitQuit_stmt(Quit_stmtContext ctx) {
+ return exec.stmt.quit(ctx);
+ }
+
+ /**
+ * SIGNAL statement
+ */
+ @Override
+ public Integer visitSignal_stmt(Signal_stmtContext ctx) {
+ return exec.stmt.signal(ctx);
+ }
+
+ /**
+ * RESIGNAL statement
+ */
+ @Override
+ public Integer visitResignal_stmt(Resignal_stmtContext ctx) {
+ return exec.stmt.resignal(ctx);
+ }
+
+ /**
+ * RETURN statement
+ */
+ @Override
+ public Integer visitReturn_stmt(Return_stmtContext ctx) {
+ return exec.stmt.return_(ctx);
+ }
+
+ /**
+ * SET session options
+ */
+ @Override
+ public Integer visitSet_current_schema_option(
+ Set_current_schema_optionContext ctx) {
+ return exec.stmt.setCurrentSchema(ctx);
+ }
+
+ private void addType(TableClass tableClass) {
+ types.put(tableClass.typeName(), tableClass);
+ }
+
+ public TableClass getType(String name) {
+ return types.get(name);
+ }
+
+ /**
+ * MAP OBJECT statement
+ */
+ @Override
+ public Integer visitMap_object_stmt(Map_object_stmtContext ctx) {
+ String source = ctx.ident_pl(0).getText();
+ String target = null;
+ String conn = null;
+ if (ctx.TO() != null) {
+ target = ctx.ident_pl(1).getText();
+ exec.objectMap.put(source.toUpperCase(), target);
+ }
+ if (ctx.AT() != null) {
+ if (ctx.TO() == null) {
+ conn = ctx.ident_pl(1).getText();
+ } else {
+ conn = ctx.ident_pl(2).getText();
+ }
+ exec.objectConnMap.put(source.toUpperCase(), conn);
+ }
+ if (trace) {
+ String log = "MAP OBJECT " + source;
+ if (target != null) {
+ log += " AS " + target;
+ }
+ if (conn != null) {
+ log += " AT " + conn;
+ }
+ trace(ctx, log);
+ }
+ return 0;
+ }
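+
+ // Example statement handled here (illustrative):
+ //
+ //   MAP OBJECT emp TO employees AT mydb;
+ //
+ // which maps the identifier EMP to "employees" and routes statements that touch
+ // it to the "mydb" connection profile.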
+
+ /**
+ * Executing OS command
+ */
+ @Override
+ public Integer visitHost_cmd(Host_cmdContext ctx) {
+ trace(ctx, "HOST");
+ execHost(ctx, ctx.start.getInputStream().getText(
+ new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())));
+ return 0;
+ }
+
+ @Override
+ public Integer visitHost_stmt(Host_stmtContext ctx) {
+ trace(ctx, "HOST");
+ execHost(ctx, evalPop(ctx.expr()).toString());
+ return 0;
+ }
+
+ public void execHost(ParserRuleContext ctx, String cmd) {
+ try {
+ if (trace) {
+ trace(ctx, "HOST Command: " + cmd);
+ }
+ Process p = Runtime.getRuntime().exec(cmd);
+ new StreamGobbler(p.getInputStream(), console).start();
+ new StreamGobbler(p.getErrorStream(), console).start();
+ int rc = p.waitFor();
+ if (trace) {
+ trace(ctx, "HOST Process exit code: " + rc);
+ }
+ setHostCode(rc);
+ } catch (Exception e) {
+ setHostCode(1);
+ signal(Signal.Type.SQLEXCEPTION);
+ }
+ }
+
+ /**
+ * Standalone expression (as a statement)
+ */
+ @Override
+ public Integer visitExpr_stmt(Expr_stmtContext ctx) {
+ visitChildren(ctx);
+ return 0;
+ }
+
+ /**
+ * String concatenation operator
+ */
+ @Override
+ public Integer visitExpr_concat(Expr_concatContext ctx) {
+ exec.expr.operatorConcat(ctx);
+ return 0;
+ }
+
+ @Override
+ public Integer visitExpr_dot_method_call(Expr_dot_method_callContext ctx) {
+ Var var = ctx.ident_pl() != null
+ ? findVariable(ctx.ident_pl().getText())
+ : evalPop(ctx.expr_func(0));
+
+ if (var == null && ctx.ident_pl() != null) {
+ Package pkg = findPackage(ctx.ident_pl().getText());
+ String pkgFuncName = ctx.expr_func(0).multipartIdentifier().getText().toUpperCase();
+ boolean executed = pkg.execFunc(pkgFuncName, ctx.expr_func(0).expr_func_params());
+ Package packCallContext = exec.getPackageCallContext();
+ if (!executed && packCallContext != null) {
+ packCallContext.execFunc(pkgFuncName, ctx.expr_func(0).expr_func_params());
+ }
+ return 0;
+ }
+
+ Expr_funcContext method = ctx.expr_func(ctx.expr_func().size() - 1);
+ switch (var.type) {
+ case PL_OBJECT:
+ Var result = dispatch(ctx, (PlObject) var.value, method.multipartIdentifier().getText(),
+ method.expr_func_params());
+ stackPush(result);
+ return 0;
+ default:
+ throw new TypeException(ctx, var.type + " is not an object");
+ }
+ }
+
+ @Override
+ public Integer visitExpr_dot_property_access(
+ Expr_dot_property_accessContext ctx) {
+ Var var = ctx.expr_func() != null
+ ? evalPop(ctx.expr_func())
+ : findVariable(ctx.ident_pl(0).getText());
+ String property = ctx.ident_pl(ctx.ident_pl().size() - 1).getText();
+
+ if (var == null && ctx.expr_func() == null) {
+ Package pkg = findPackage(ctx.ident_pl(0).getText());
+ Var variable = pkg.findVariable(property);
+ if (variable != null) {
+ stackPush(variable);
+ } else {
+ Package packCallContext = exec.getPackageCallContext();
+ stackPush(packCallContext.findVariable(property));
+ }
+ return 0;
+ }
+
+ switch (var.type) {
+ case PL_OBJECT:
+ Var result = dispatch(ctx, (PlObject) var.value, property, Collections.emptyList());
+ stackPush(result);
+ return 0;
+ case ROW:
+ stackPush(((Row) var.value).getValue(property));
+ return 0;
+ default:
+ throw new TypeException(ctx, var.type + " is not an object/row");
+ }
+ }
+
+ /**
+ * Simple CASE expression
+ */
+ @Override
+ public Integer visitExpr_case_simple(Expr_case_simpleContext ctx) {
+ exec.expr.execSimpleCase(ctx);
+ return 0;
+ }
+
+ /**
+ * Searched CASE expression
+ */
+ @Override
+ public Integer visitExpr_case_searched(Expr_case_searchedContext ctx) {
+ exec.expr.execSearchedCase(ctx);
+ return 0;
+ }
+
+ /**
+ * GET DIAGNOSTICS EXCEPTION statement
+ */
+ @Override
+ public Integer visitGet_diag_stmt_exception_item(
+ Get_diag_stmt_exception_itemContext ctx) {
+ return exec.stmt.getDiagnosticsException(ctx);
+ }
+
+ /**
+ * GET DIAGNOSTICS ROW_COUNT statement
+ */
+ @Override
+ public Integer visitGet_diag_stmt_rowcount_item(
+ Get_diag_stmt_rowcount_itemContext ctx) {
+ return exec.stmt.getDiagnosticsRowCount(ctx);
+ }
+
+ /**
+ * Label
+ */
+ @Override
+ public Integer visitLabel_stmt(Label_stmtContext ctx) {
+ if (ctx.IDENTIFIER() != null) {
+ exec.labels.push(ctx.IDENTIFIER().toString());
+ } else {
+ String label = ctx.LABEL_PL().getText();
+ if (label.endsWith(":")) {
+ label = label.substring(0, label.length() - 1);
+ }
+ exec.labels.push(label);
+ }
+ return 0;
+ }
+
+ /**
+ * Identifier
+ */
+ @Override
+ public Integer visitIdent_pl(Ident_plContext ctx) {
+ boolean hasSub = false;
+ String ident = ctx.getText();
+ String actualIdent = ident;
+ if (ident.startsWith("-")) {
+ hasSub = true;
+ actualIdent = ident.substring(1);
+ }
+
+ Var var = findVariable(actualIdent);
+ if (var != null) { // Use previously saved variables
+ if (hasSub) {
+ Var var1 = new Var(var);
+ var1.negate();
+ exec.stackPush(var1);
+ } else {
+ exec.stackPush(var);
+ }
+ } else {
+ if (exec.inCallStmt) {
+ exec.stackPush(new Var(Var.Type.IDENT, ident));
+ } else {
+ if (!exec.functions.exec(new FuncNameInfo(ident), null)) {
+ throw new UndefinedIdentException(ctx, ident);
+ }
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * string literal
+ */
+ @Override
+ public Integer visitString(StringContext ctx) {
+ exec.stackPush(Utils.unquoteString(ctx.getText()));
+ return 0;
+ }
+
+ /**
+ * Integer literal, signed or unsigned
+ */
+ @Override
+ public Integer visitInt_number(Int_numberContext ctx) {
+ exec.stack.push(new Var(Long.valueOf(ctx.getText())));
+ return 0;
+ }
+
+ /**
+ * Interval expression (e.g. INTERVAL '1' DAY)
+ */
+ @Override
+ public Integer visitExpr_interval(Expr_intervalContext ctx) {
+ int num = evalPop(ctx.expr()).intValue();
+ Interval interval = new Interval().set(num, ctx.interval_item().getText());
+ stackPush(new Var(interval));
+ return 0;
+ }
+
+ /**
+ * Decimal literal, signed or unsigned
+ */
+ @Override
+ public Integer visitDec_number(Dec_numberContext ctx) {
+ stackPush(new Var(new BigDecimal(ctx.getText())));
+ return 0;
+ }
+
+ /**
+ * Boolean literal
+ */
+ @Override
+ public Integer visitBool_literal(Bool_literalContext ctx) {
+ boolean val = true;
+ if (ctx.FALSE() != null) {
+ val = false;
+ }
+ stackPush(new Var(val));
+ return 0;
+ }
+
+ /**
+ * NULL constant
+ */
+ @Override
+ public Integer visitNull_const(Null_constContext ctx) {
+ stackPush(new Var());
+ return 0;
+ }
+
+ /**
+ * DATE 'YYYY-MM-DD' literal
+ */
+ @Override
+ public Integer visitDate_literal(Date_literalContext ctx) {
+ String str = evalPop(ctx.string()).toString();
+ stackPush(new Var(Var.Type.DATE, Utils.toDate(str)));
+ return 0;
+ }
+
+ /**
+ * TIMESTAMP 'YYYY-MM-DD HH:MI:SS.FFF' literal
+ */
+ @Override
+ public Integer visitTimestamp_literal(Timestamp_literalContext ctx) {
+ String str = evalPop(ctx.string()).toString();
+ int len = str.length();
+ int precision = 0;
+ if (len > 19 && len <= 29) {
+ precision = len - 20;
+ if (precision > 3) {
+ precision = 3;
+ }
+ }
+ stackPush(new Var(Utils.toTimestamp(str), precision));
+ return 0;
+ }
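+
+ // Precision handling (illustrative): the digits after the seconds are counted
+ // and capped at 3, e.g.
+ //
+ //   TIMESTAMP '2023-01-01 10:00:00'        -> precision 0
+ //   TIMESTAMP '2023-01-01 10:00:00.12'     -> precision 2
+ //   TIMESTAMP '2023-01-01 10:00:00.123456' -> precision capped at 3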
+
+ /**
+ * Get the package context within which the current routine is executed
+ */
+ Package getPackageCallContext() {
+ Scope cur = exec.currentScope;
+ while (cur != null) {
+ if (cur.type == Scope.Type.ROUTINE) {
+ return cur.pack;
+ }
+ cur = cur.parent;
+ }
+ return null;
+ }
+
+ /**
+ * Define the connection profile to execute the current statement
+ */
+ public String getStatementConnection() {
+ return exec.conf.defaultConnection;
+ }
+
+ /**
+ * Define the database type by profile name
+ */
+ Conn.Type getConnectionType(String conn) {
+ return exec.conn.getTypeByProfile(conn);
+ }
+
+ /**
+ * Get the current database type
+ */
+ public Conn.Type getConnectionType() {
+ return getConnectionType(exec.conf.defaultConnection);
+ }
+
+ /**
+ * Get node text including spaces
+ */
+ String getText(ParserRuleContext ctx) {
+ return ctx.start.getInputStream()
+ .getText(new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+ }
+
+ String getText(ParserRuleContext ctx, Token start, Token stop) {
+ return ctx.start.getInputStream()
+ .getText(new org.antlr.v4.runtime.misc.Interval(start.getStartIndex(), stop.getStopIndex()));
+ }
+
+ /**
+ * Append the text preserving the formatting (space symbols) between tokens
+ */
+ void append(StringBuilder str, String appendStr, Token start, Token stop) {
+ String spaces = start.getInputStream()
+ .getText(new org.antlr.v4.runtime.misc.Interval(start.getStartIndex(), stop.getStopIndex()));
+ spaces = spaces.substring(start.getText().length(), spaces.length() - stop.getText().length());
+ str.append(spaces);
+ str.append(appendStr);
+ }
+
+ void append(StringBuilder str, TerminalNode start, TerminalNode stop) {
+ String text = start.getSymbol().getInputStream().getText(
+ new org.antlr.v4.runtime.misc.Interval(start.getSymbol().getStartIndex(),
+ stop.getSymbol().getStopIndex()));
+ str.append(text);
+ }
+
+ /**
+ * Get the first non-null node
+ */
+ TerminalNode nvl(TerminalNode t1, TerminalNode t2) {
+ if (t1 != null) {
+ return t1;
+ }
+ return t2;
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ public Var evalPop(ParserRuleContext ctx) {
+ visit(ctx);
+ if (!exec.stack.isEmpty()) {
+ return exec.stackPop();
+ }
+ return Var.Empty;
+ }
+
+ /**
+ * Evaluate the data type and length
+ */
+ String evalPop(DtypeContext type,
+ Dtype_lenContext len) {
+ if (isConvert(exec.conf.defaultConnection)) {
+ return exec.converter.dataType(type, len);
+ }
+ return getText(type, type.getStart(), len == null ? type.getStop() : len.getStop());
+ }
+
+ /**
+ * Get formatted text between 2 tokens
+ */
+ public static String getFormattedText(ParserRuleContext ctx) {
+ return ctx.start.getInputStream().getText(
+ new org.antlr.v4.runtime.misc.Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex()));
+ }
+
+ /**
+ * Flag whether executed from UDF or not
+ */
+ public void setUdfRun(boolean udfRun) {
+ this.udfRun = udfRun;
+ }
+
+ /**
+ * Whether on-the-fly SQL conversion is required for the connection
+ */
+ boolean isConvert(String connName) {
+ return exec.conf.getConnectionConvert(connName);
+ }
+
+ /**
+ * Increment the row count
+ */
+ public int incRowCount() {
+ return exec.rowCount++;
+ }
+
+ /**
+ * Set the row count
+ */
+ public void setRowCount(int rowCount) {
+ exec.rowCount = rowCount;
+ }
+
+ /**
+ * Trace information
+ */
+ public void trace(ParserRuleContext ctx, String message) {
+ if (!trace) {
+ return;
+ }
+ if (ctx != null) {
+ console.printLine("Ln:" + ctx.getStart().getLine() + " " + message);
+ } else {
+ console.printLine(message);
+ }
+ }
+
+ /**
+ * Trace values retrieved from the database
+ */
+ public void trace(ParserRuleContext ctx, Var var, Metadata meta, int idx) {
+ if (var.type != Var.Type.ROW) {
+ trace(ctx, "COLUMN: " + meta.columnName(idx) + ", " + meta.columnTypeName(idx));
+ trace(ctx, "SET " + var.getName() + " = " + var.toString());
+ } else {
+ Row row = (Row) var.value;
+ int cnt = row.size();
+ for (int j = 1; j <= cnt; j++) {
+ Var v = row.getValue(j - 1);
+ trace(ctx, "COLUMN: " + meta.columnName(j) + ", " + meta.columnTypeName(j));
+ trace(ctx, "SET " + v.getName() + " = " + v.toString());
+ }
+ }
+ }
+
+ /**
+ * Informational messages
+ */
+ public void info(ParserRuleContext ctx, String message) {
+ if (!info) {
+ return;
+ }
+ if (ctx != null) {
+ console.printError("Ln:" + ctx.getStart().getLine() + " " + message);
+ } else {
+ console.printError(message);
+ }
+ }
+
+ /**
+ * Error message
+ */
+ public void error(ParserRuleContext ctx, String message) {
+ if (ctx != null) {
+ console.printError("Ln:" + ctx.getStart().getLine() + " " + message);
+ } else {
+ console.printError(message);
+ }
+ }
+
+ public Stack<Var> getStack() {
+ return exec.stack;
+ }
+
+ public int getRowCount() {
+ return exec.rowCount;
+ }
+
+ public Conf getConf() {
+ return exec.conf;
+ }
+
+ public Meta getMeta() {
+ return exec.meta;
+ }
+
+ public boolean getTrace() {
+ return exec.trace;
+ }
+
+ public boolean getInfo() {
+ return exec.info;
+ }
+
+ public boolean getOffline() {
+ return exec.offline;
+ }
+
+ public Console getConsole() {
+ return console;
+ }
+
+ public void setQueryExecutor(QueryExecutor queryExecutor) {
+ this.queryExecutor = queryExecutor;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Expression.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Expression.java
new file mode 100644
index 00000000000000..31df7564245601
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Expression.java
@@ -0,0 +1,578 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.nereids.PLParser.Bool_exprContext;
+import org.apache.doris.nereids.PLParser.Bool_expr_binaryContext;
+import org.apache.doris.nereids.PLParser.Bool_expr_binary_operatorContext;
+import org.apache.doris.nereids.PLParser.Bool_expr_unaryContext;
+import org.apache.doris.nereids.PLParser.ExprContext;
+import org.apache.doris.nereids.PLParser.Expr_case_searchedContext;
+import org.apache.doris.nereids.PLParser.Expr_case_simpleContext;
+import org.apache.doris.nereids.PLParser.Expr_concatContext;
+import org.apache.doris.nereids.PLParser.Expr_cursor_attributeContext;
+import org.apache.doris.plsql.Var.Type;
+import org.apache.doris.plsql.exception.PlValidationException;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
+
+/**
+ * Expressions
+ */
+public class Expression {
+
+ Exec exec;
+ boolean trace = false;
+
+ Expression(Exec e) {
+ exec = e;
+ trace = exec.getTrace();
+ }
+
+ /**
+ * Evaluate an expression
+ */
+ public void exec(ExprContext ctx) {
+ try {
+ if (ctx.PLUS() != null) {
+ operatorAdd(ctx);
+ } else if (ctx.SUBTRACT() != null) {
+ operatorSub(ctx);
+ } else if (ctx.ASTERISK() != null) {
+ operatorMultiply(ctx);
+ } else if (ctx.SLASH() != null) {
+ operatorDiv(ctx);
+ } else if (ctx.interval_item() != null) {
+ createInterval(ctx);
+ } else {
+ visitChildren(ctx);
+ }
+ } catch (PlValidationException e) {
+ throw e;
+ } catch (Exception e) {
+ exec.signal(e);
+ }
+ }
+
+ /**
+ * Evaluate a boolean expression
+ */
+ public void execBool(Bool_exprContext ctx) {
+ if (ctx.bool_expr_atom() != null) {
+ eval(ctx.bool_expr_atom());
+ return;
+ }
+ Var result = evalPop(ctx.bool_expr(0));
+ if (ctx.LEFT_PAREN() != null) {
+ if (ctx.NOT() != null) {
+ result.negate();
+ }
+ } else if (ctx.bool_expr_logical_operator() != null) {
+ if (ctx.bool_expr_logical_operator().AND() != null) {
+ if (result.isTrue()) {
+ result = evalPop(ctx.bool_expr(1));
+ }
+ } else if (ctx.bool_expr_logical_operator().OR() != null) {
+ if (!result.isTrue()) {
+ result = evalPop(ctx.bool_expr(1));
+ }
+ }
+ }
+ exec.stackPush(result);
+ }
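+
+ // Short-circuit sketch: for AND the right operand is evaluated only when the
+ // left one is true, and for OR only when the left one is false, e.g.
+ // (illustrative)
+ //
+ //   a > 0 AND 1 / a > 0   -- the division is skipped when a > 0 is false
+ //   done OR slow_check()  -- slow_check() is skipped when done is already true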
+
+ /**
+ * Binary boolean expression
+ */
+ public Integer execBoolBinary(Bool_expr_binaryContext ctx) {
+ Bool_expr_binary_operatorContext op = ctx.bool_expr_binary_operator();
+ if (op.EQ() != null) {
+ operatorEqual(ctx, true);
+ } else if (op.NEQ() != null) {
+ operatorEqual(ctx, false);
+ } else if (op.GT() != null || op.LT() != null || op.GTE() != null
+ || op.LTE() != null) {
+ operatorCompare(ctx, op);
+ } else {
+ exec.stackPush(false);
+ }
+ return 0;
+ }
+
+ /**
+ * Unary boolean expression
+ */
+ public Integer execBoolUnary(Bool_expr_unaryContext ctx) {
+ boolean val = false;
+ if (ctx.IS() != null) {
+ val = evalPop(ctx.expr(0)).isNull();
+ if (ctx.NOT() != null) {
+ val = !val;
+ }
+ } else if (ctx.BETWEEN() != null) {
+ Var v = evalPop(ctx.expr(0));
+ Var v1 = evalPop(ctx.expr(1));
+ int cmp = v.compareTo(v1);
+ if (cmp >= 0) {
+ Var v2 = evalPop(ctx.expr(2));
+ cmp = v.compareTo(v2);
+ if (cmp <= 0) {
+ val = true;
+ }
+ }
+ }
+ exec.stackPush(val);
+ return 0;
+ }
+
+ /**
+ * Cursor attribute %ISOPEN, %FOUND and %NOTFOUND
+ */
+ public void execCursorAttribute(Expr_cursor_attributeContext ctx) {
+ String name = ctx.ident_pl().getText();
+ Var val = new Var(Var.Type.BOOL);
+ Var cursorVar = exec.findCursor(name);
+ if (cursorVar != null) {
+ Cursor cursor = (Cursor) cursorVar.value;
+ if (cursor != null) {
+ if (ctx.ISOPEN() != null) {
+ val.setValue(cursor.isOpen());
+ } else if (ctx.FOUND() != null) {
+ val.setValue(cursor.isFound());
+ } else if (ctx.NOTFOUND() != null) {
+ val.setValue(cursor.isNotFound());
+ }
+ }
+ exec.stackPush(val);
+ } else {
+ trace(ctx, "Cursor not found: " + name);
+ exec.signal(Signal.Type.SQLEXCEPTION);
+ }
+ }
+
+ /**
+ * Addition operator
+ */
+ public void operatorAdd(ExprContext ctx) {
+ Var v1 = evalPop(ctx.expr(0));
+ Var v2 = evalPop(ctx.expr(1));
+ if (v1.value == null || v2.value == null) {
+ evalNull();
+ } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var((long) v1.value + (long) v2.value));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((long) v1.value)).add((BigDecimal) v2.value)));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((long) v1.value + (double) v2.value));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).add((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).add(new BigDecimal((long) v2.value))));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).add(new BigDecimal((double) v2.value))));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((double) v1.value + (double) v2.value));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((double) v1.value)).add((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((double) v1.value) + (long) v2.value));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DATE) {
+ exec.stackPush(changeDateByInt((Date) v2.value, (long) v1.value, true /*add*/));
+ } else if (v1.type == Type.DATE && v2.type == Type.BIGINT) {
+ exec.stackPush(changeDateByInt((Date) v1.value, (long) v2.value, true /*add*/));
+ } else if (v1.type == Type.STRING && v2.type == Type.STRING) {
+ exec.stackPush(((String) v1.value) + ((String) v2.value));
+ } else if (v1.type == Type.DATE && v2.type == Type.INTERVAL) {
+ exec.stackPush(new Var(((Interval) v2.value).dateChange((Date) v1.value, true /*add*/)));
+ } else if (v1.type == Type.TIMESTAMP && v2.type == Type.INTERVAL) {
+ exec.stackPush(
+ new Var(((Interval) v2.value).timestampChange((Timestamp) v1.value, true /*add*/), v1.scale));
+ } else {
+ unsupported(ctx, v1, v2, "+");
+ }
+ }
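+
+ // Examples of the type dispatch above (illustrative):
+ //
+ //   1 + 1                   -- BIGINT + BIGINT   -> 2
+ //   1 + 0.5                 -- BIGINT + DECIMAL  -> 1.5
+ //   DATE '2023-01-01' + 1   -- DATE + BIGINT     -> DATE '2023-01-02'
+ //   'a' + 'b'               -- STRING + STRING   -> 'ab'
+ //   NULL + 1                -- any NULL operand  -> NULL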
+
+ /**
+ * Subtraction operator
+ */
+ public void operatorSub(ExprContext ctx) {
+ Var v1 = evalPop(ctx.expr(0));
+ Var v2 = evalPop(ctx.expr(1));
+ if (v1.value == null || v2.value == null) {
+ evalNull();
+ } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var((long) v1.value - (long) v2.value));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((long) v1.value)).subtract((BigDecimal) v2.value)));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((long) v1.value - (double) v2.value));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).subtract((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).subtract(new BigDecimal((long) v2.value))));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).subtract(new BigDecimal((double) v2.value))));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((double) v1.value - (double) v2.value));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((double) v1.value)).subtract((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((double) v1.value) - (long) v2.value));
+ } else if (v1.type == Type.DATE && v2.type == Type.BIGINT) {
+ exec.stackPush(changeDateByInt((Date) v1.value, (long) v2.value, false /*subtract*/));
+ } else if (v1.type == Type.DATE && v2.type == Type.INTERVAL) {
+ exec.stackPush(new Var(((Interval) v2.value).dateChange((Date) v1.value, false /*subtract*/)));
+ } else if (v1.type == Type.TIMESTAMP && v2.type == Type.INTERVAL) {
+ exec.stackPush(
+ new Var(((Interval) v2.value).timestampChange((Timestamp) v1.value, false /*subtract*/), v1.scale));
+ } else {
+ unsupported(ctx, v1, v2, "-");
+ }
+ }
+
+ /**
+ * Multiplication operator
+ */
+ public void operatorMultiply(ExprContext ctx) {
+ Var v1 = evalPop(ctx.expr(0));
+ Var v2 = evalPop(ctx.expr(1));
+ if (v1.value == null || v2.value == null) {
+ evalNull();
+ } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var((long) v1.value * (long) v2.value));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((long) v1.value)).multiply((BigDecimal) v2.value)));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((long) v1.value * (double) v2.value));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).multiply((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).multiply(new BigDecimal((long) v2.value))));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).multiply(new BigDecimal((double) v2.value))));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((double) v1.value * (double) v2.value));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((double) v1.value)).multiply((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((double) v1.value) * (long) v2.value));
+ } else {
+ unsupported(ctx, v1, v2, "*");
+ }
+ }
+
+ /**
+ * Division operator
+ */
+ public void operatorDiv(ExprContext ctx) {
+ Var v1 = evalPop(ctx.expr(0));
+ Var v2 = evalPop(ctx.expr(1));
+ if (v1.value == null || v2.value == null) {
+ evalNull();
+ } else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var((long) v1.value / (long) v2.value));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((long) v1.value)).divide((BigDecimal) v2.value)));
+ } else if (v1.type == Type.BIGINT && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((long) v1.value / (double) v2.value));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).divide((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).divide(new BigDecimal((long) v2.value))));
+ } else if (v1.type == Type.DECIMAL && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var(((BigDecimal) v1.value).divide(new BigDecimal((double) v2.value))));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DOUBLE) {
+ exec.stackPush(new Var((double) v1.value / (double) v2.value));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.DECIMAL) {
+ exec.stackPush(new Var((new BigDecimal((double) v1.value)).divide((BigDecimal) v2.value)));
+ } else if (v1.type == Type.DOUBLE && v2.type == Type.BIGINT) {
+ exec.stackPush(new Var(((double) v1.value) / (long) v2.value));
+ } else {
+ unsupported(ctx, v1, v2, "/");
+ }
+ }
+
+ private void unsupported(ExprContext ctx, Var op1, Var op2, String operator) {
+ String msg = String.format("Unsupported data types in '%s' operator (%s%s%s)", operator, op1.type, operator,
+ op2.type);
+ if (ctx != null) {
+ msg = "Ln:" + ctx.getStart().getLine() + " " + msg;
+ }
+ exec.signal(Signal.Type.UNSUPPORTED_OPERATION, msg);
+ }
+
+ /**
+ * Add or subtract the specified number of days from DATE
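+ * e.g. changeDateByInt(DATE '2024-01-10', 3L, false) yields DATE '2024-01-07' (illustrative values).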
+ */
+ public Var changeDateByInt(Date d, Long i, boolean add) {
+ Calendar c = Calendar.getInstance();
+ c.setTimeInMillis(d.getTime());
+ int days = i.intValue();
+ if (!add) {
+ days *= -1;
+ }
+ c.add(Calendar.DAY_OF_MONTH, days);
+ return new Var(new Date(c.getTimeInMillis()));
+ }
+
+ /**
+ * Equality operator
+ */
+ public void operatorEqual(Bool_expr_binaryContext ctx, boolean equal) {
+ Var v1 = evalPop(ctx.expr(0));
+ Var v2 = evalPop(ctx.expr(1));
+ boolean eq = v1.equals(v2);
+ if (!equal) {
+ eq = !eq;
+ }
+ exec.stackPush(eq);
+ }
+
+ /**
+ * Comparison operator
+ */
+ public void operatorCompare(Bool_expr_binaryContext ctx,
+ Bool_expr_binary_operatorContext op) {
+ Var v1 = evalPop(ctx.expr(0));
+ Var v2 = evalPop(ctx.expr(1));
+ int cmp = v1.compareTo(v2);
+ boolean bool = false;
+ if (op.GT() != null) {
+ if (cmp > 0) {
+ bool = true;
+ }
+ } else if (op.GTE() != null) {
+ if (cmp >= 0) {
+ bool = true;
+ }
+ }
+ if (op.LT() != null) {
+ if (cmp < 0) {
+ bool = true;
+ }
+ } else if (op.LTE() != null) {
+ if (cmp <= 0) {
+ bool = true;
+ }
+ }
+ exec.stackPush(bool);
+ }
+
+ /**
+ * String concatenation operator
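+ * NULL items are skipped, and the result is NULL only when every item is NULL
+ * (e.g. NULL || 'a' || 'b' evaluates to 'ab').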
+ */
+ public void operatorConcat(Expr_concatContext ctx) {
+ StringBuilder val = new StringBuilder();
+ int cnt = ctx.expr_concat_item().size();
+ boolean nulls = true;
+ for (int i = 0; i < cnt; i++) {
+ Var c = evalPop(ctx.expr_concat_item(i));
+ if (!c.isNull()) {
+ val.append(c.toString());
+ nulls = false;
+ }
+ }
+ if (nulls) {
+ evalNull();
+ } else {
+ evalString(val);
+ }
+ }
+
+ /**
+ * String concatenation operator in executable SQL statement
+ */
+ public void operatorConcatSql(Expr_concatContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append("CONCAT(");
+ int cnt = ctx.expr_concat_item().size();
+ for (int i = 0; i < cnt; i++) {
+ sql.append(evalPop(ctx.expr_concat_item(i)).toString());
+ if (i + 1 < cnt) {
+ sql.append(", ");
+ }
+ }
+ sql.append(")");
+ exec.stackPush(sql);
+ }
+
+ /**
+ * Simple CASE expression
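+ * The selector is evaluated once and compared with each WHEN value in order;
+ * the first match wins, otherwise the ELSE branch (or NULL when no ELSE is present).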
+ */
+ public void execSimpleCase(Expr_case_simpleContext ctx) {
+ int i = 1;
+ int cnt = ctx.expr().size();
+ boolean found = false;
+ Var val = evalPop(ctx.expr(0));
+ while (i < cnt) {
+ Var when = evalPop(ctx.expr(i));
+ if (val.compareTo(when) == 0) {
+ visit(ctx.expr(i + 1));
+ found = true;
+ break;
+ }
+ i += 2;
+ }
+ if (!found) {
+ if (ctx.ELSE() != null) {
+ visit(ctx.expr(cnt - 1));
+ } else {
+ evalNull();
+ }
+ }
+ }
+
+ /**
+ * Simple CASE expression in executable SQL statement
+ */
+ public void execSimpleCaseSql(Expr_case_simpleContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append("CASE ");
+ sql.append(evalPop(ctx.expr(0)).toString());
+ int cnt = ctx.WHEN().size();
+ for (int i = 0; i < cnt; i++) {
+ sql.append(" WHEN ");
+ sql.append(evalPop(ctx.expr(i * 2 + 1)).toString());
+ sql.append(" THEN ");
+ sql.append(evalPop(ctx.expr(i * 2 + 2)).toString());
+ }
+ if (ctx.ELSE() != null) {
+ sql.append(" ELSE ");
+ sql.append(evalPop(ctx.expr(cnt * 2 + 1)).toString());
+ }
+ sql.append(" END");
+ exec.stackPush(sql);
+ }
+
+ /**
+ * Searched CASE expression
+ */
+ public void execSearchedCase(Expr_case_searchedContext ctx) {
+ int cnt = ctx.bool_expr().size();
+ boolean found = false;
+ for (int i = 0; i < cnt; i++) {
+ if (evalPop(ctx.bool_expr(i)).isTrue()) {
+ visit(ctx.expr(i));
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ if (ctx.ELSE() != null) {
+ visit(ctx.expr(cnt));
+ } else {
+ evalNull();
+ }
+ }
+ }
+
+ /**
+ * Searched CASE expression in executable SQL statement
+ */
+ public void execSearchedCaseSql(Expr_case_searchedContext ctx) {
+ StringBuilder sql = new StringBuilder();
+ sql.append("CASE");
+ int cnt = ctx.WHEN().size();
+ for (int i = 0; i < cnt; i++) {
+ sql.append(" WHEN ");
+ sql.append(evalPop(ctx.bool_expr(i)).toString());
+ sql.append(" THEN ");
+ sql.append(evalPop(ctx.expr(i)).toString());
+ }
+ if (ctx.ELSE() != null) {
+ sql.append(" ELSE ");
+ sql.append(evalPop(ctx.expr(cnt)).toString());
+ }
+ sql.append(" END");
+ exec.stackPush(sql);
+ }
+
+ /**
+ * Create an interval variable
+ */
+ public void createInterval(ExprContext ctx) {
+ int num = evalPop(ctx.expr(0)).intValue();
+ Interval interval = new Interval().set(num, ctx.interval_item().getText());
+ exec.stackPush(new Var(interval));
+ }
+
+ /**
+ * Evaluate the expression and push the value to the stack
+ */
+ void eval(ParserRuleContext ctx) {
+ visit(ctx);
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ Var evalPop(ParserRuleContext ctx) {
+ visit(ctx);
+ if (!exec.stack.isEmpty()) {
+ return exec.stackPop();
+ }
+ return Var.Empty;
+ }
+
+ /**
+ * Evaluate the expression to specified String value
+ */
+ void evalString(String string) {
+ exec.stackPush(new Var(string));
+ }
+
+ void evalString(StringBuilder string) {
+ evalString(string.toString());
+ }
+
+ /**
+ * Evaluate the expression to NULL
+ */
+ void evalNull() {
+ exec.stackPush(Var.Null);
+ }
+
+ /**
+ * Execute rules
+ */
+ Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ /**
+ * Execute children rules
+ */
+ Integer visitChildren(ParserRuleContext ctx) {
+ return exec.visitChildren(ctx);
+ }
+
+ /**
+ * Trace information
+ */
+ public void trace(ParserRuleContext ctx, String message) {
+ exec.trace(ctx, message);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/File.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/File.java
new file mode 100644
index 00000000000000..377f726edcb918
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/File.java
@@ -0,0 +1,157 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/File.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+
+/**
+ * HDFS file operations
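+ *
+ * Typical usage (sketch, paths are illustrative):
+ *   File f = new File();
+ *   f.create("/tmp/plsql", "out.txt", true);
+ *   f.writeString("hello");
+ *   f.close();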
+ */
+public class File {
+ Path path;
+ FileSystem fs;
+ FSDataInputStream in;
+ FSDataOutputStream out;
+
+ /**
+ * Create FileSystem object
+ */
+ public FileSystem createFs() throws IOException {
+ fs = FileSystem.get(new Configuration());
+ return fs;
+ }
+
+ /**
+ * Create a file
+ */
+ public FSDataOutputStream create(boolean overwrite) {
+ try {
+ if (fs == null) {
+ fs = createFs();
+ }
+ out = fs.create(path, overwrite);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return out;
+ }
+
+ public FSDataOutputStream create(String dir, String file, boolean overwrite) {
+ path = new Path(dir, file);
+ return create(overwrite);
+ }
+
+ public FSDataOutputStream create(String file, boolean overwrite) {
+ path = new Path(file);
+ return create(overwrite);
+ }
+
+ /**
+ * Open an existing file
+ */
+ public void open(String dir, String file) {
+ path = new Path(dir, file);
+ try {
+ if (fs == null) {
+ fs = createFs();
+ }
+ in = fs.open(path);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Check if the directory or file exists
+ *
+ * @throws IOException
+ */
+ boolean exists(String name) throws IOException {
+ if (fs == null) {
+ fs = createFs();
+ }
+ return fs.exists(new Path(name));
+ }
+
+ /**
+ * Read a character from input
+ *
+ * @throws IOException
+ */
+ public char readChar() throws IOException {
+ return in.readChar();
+ }
+
+ /**
+ * Write string to file
+ */
+ public void writeString(String str) {
+ try {
+ out.writeChars(str);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Close a file
+ */
+ public void close() {
+ try {
+ if (in != null) {
+ in.close();
+ }
+ if (out != null) {
+ out.close();
+ }
+ in = null;
+ out = null;
+ path = null;
+ fs = null;
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Get the fully-qualified path
+ * NOTE: FileSystem.resolvePath() is not available in Hadoop 1.2.1
+ *
+ * @throws IOException
+ */
+ public Path resolvePath(Path path) throws IOException {
+ return fs.getFileStatus(path).getPath();
+ }
+
+ @Override
+ public String toString() {
+ if (path != null) {
+ return "FILE <" + path.toString() + ">";
+ }
+ return "FILE ";
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Handler.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Handler.java
new file mode 100644
index 00000000000000..eb2caf013af9e0
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Handler.java
@@ -0,0 +1,48 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Handler.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.nereids.PLParser.Declare_handler_itemContext;
+import org.apache.doris.plsql.Signal.Type;
+
+/**
+ * PL/SQL condition and exception handler
+ */
+public class Handler {
+ public enum ExecType {
+ CONTINUE, EXIT
+ }
+
+ ExecType execType;
+ Type type;
+ String value;
+ Scope scope;
+ Declare_handler_itemContext ctx;
+
+ Handler(ExecType execType, Type type, String value, Scope scope,
+ Declare_handler_itemContext ctx) {
+ this.execType = execType;
+ this.type = type;
+ this.value = value;
+ this.scope = scope;
+ this.ctx = ctx;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Interval.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Interval.java
new file mode 100644
index 00000000000000..6364f25d0aee5a
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Interval.java
@@ -0,0 +1,111 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Interval.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Calendar;
+
+/**
+ * Date and time interval
+ */
+public class Interval {
+ int days = 0;
+ int milliseconds = 0;
+
+ /**
+ * Add or subtract interval value to the specified date
+ */
+ public Date dateChange(Date in, boolean add) {
+ Calendar c = Calendar.getInstance();
+ c.setTimeInMillis(in.getTime());
+ calendarChange(c, add);
+ return new Date(c.getTimeInMillis());
+ }
+
+ /**
+ * Add or subtract interval value to the specified timestamp
+ */
+ public Timestamp timestampChange(Timestamp in, boolean add) {
+ Calendar c = Calendar.getInstance();
+ c.setTimeInMillis(in.getTime());
+ calendarChange(c, add);
+ return new Timestamp(c.getTimeInMillis());
+ }
+
+ /**
+ * Add or subtract the interval value to/from the specified Calendar value
+ */
+ public Calendar calendarChange(Calendar c, boolean add) {
+ int a = 1;
+ if (!add) {
+ a = -1;
+ }
+ if (days != 0) {
+ c.add(Calendar.DAY_OF_MONTH, days * a);
+ }
+ if (milliseconds != 0) {
+ c.setTimeInMillis(c.getTimeInMillis() + milliseconds * a);
+ }
+ return c;
+ }
+
+ /**
+ * Set interval value
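+ * e.g. new Interval().set(3, "DAYS") yields a 3-day interval; only DAYS/DAY and
+ * MICROSECONDS/MICROSECOND items are recognized.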
+ */
+ public Interval set(int value, String item) {
+ if (item.compareToIgnoreCase("DAYS") == 0 || item.compareToIgnoreCase("DAY") == 0) {
+ setDays(value);
+ }
+ if (item.compareToIgnoreCase("MICROSECONDS") == 0 || item.compareToIgnoreCase("MICROSECOND") == 0) {
+ setMilliseconds(value);
+ }
+ return this;
+ }
+
+ /**
+ * Set interval items
+ */
+ public void setDays(int days) {
+ this.days = days;
+ }
+
+ public void setMilliseconds(int milliseconds) {
+ this.milliseconds = milliseconds;
+ }
+
+ /**
+ * Convert interval to string
+ */
+ @Override
+ public String toString() {
+ StringBuilder s = new StringBuilder();
+ if (days != 0) {
+ s.append(days);
+ s.append(" days");
+ }
+ if (milliseconds != 0) {
+ s.append(milliseconds);
+ s.append(" milliseconds");
+ }
+ return s.toString();
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Meta.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Meta.java
new file mode 100644
index 00000000000000..5d5bd1fd7fc42f
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Meta.java
@@ -0,0 +1,314 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.plsql.executor.Metadata;
+import org.apache.doris.plsql.executor.QueryExecutor;
+import org.apache.doris.plsql.executor.QueryResult;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/**
+ * Metadata
+ */
+public class Meta {
+
+ HashMap<String, HashMap<String, Row>> dataTypes = new HashMap<>();
+
+ Exec exec;
+ boolean trace = false;
+ boolean info = false;
+ private QueryExecutor queryExecutor;
+
+ Meta(Exec e, QueryExecutor queryExecutor) {
+ exec = e;
+ trace = exec.getTrace();
+ info = exec.getInfo();
+ this.queryExecutor = queryExecutor;
+ }
+
+ /**
+ * Get the data type of a column (the column name is qualified, e.g. schema.table.column)
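+ * The table part is used as the cache key, e.g. "db.tab.col" looks up table "db.tab" and column "COL".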
+ */
+ String getDataType(ParserRuleContext ctx, String conn, String column) {
+ String type = null;
+ HashMap<String, Row> map = dataTypes.get(conn);
+ if (map == null) {
+ map = new HashMap<>();
+ dataTypes.put(conn, map);
+ }
+ ArrayList<String> twoparts = splitIdentifierToTwoParts(column);
+ if (twoparts != null) {
+ String tab = twoparts.get(0);
+ String col = twoparts.get(1).toUpperCase();
+ Row row = map.get(tab);
+ if (row != null) {
+ type = row.getType(col);
+ } else {
+ row = readColumns(ctx, conn, tab, map);
+ if (row != null) {
+ type = row.getType(col);
+ }
+ }
+ }
+ return type;
+ }
+
+ /**
+ * Get data types for all columns of the table
+ */
+ Row getRowDataType(ParserRuleContext ctx, String conn, String table) {
+ HashMap<String, Row> map = dataTypes.get(conn);
+ if (map == null) {
+ map = new HashMap<>();
+ dataTypes.put(conn, map);
+ }
+ Row row = map.get(table);
+ if (row == null) {
+ row = readColumns(ctx, conn, table, map);
+ }
+ return row;
+ }
+
+ /**
+ * Get data types for all columns of the SELECT statement
+ */
+ Row getRowDataTypeForSelect(ParserRuleContext ctx, String conn, String select) {
+ Row row = null;
+ Conn.Type connType = exec.getConnectionType(conn);
+ // Hive does not support ResultSetMetaData on PreparedStatement, and Hive DESCRIBE
+ // does not support queries, so we have to execute the query with LIMIT 1
+ if (connType == Conn.Type.HIVE) {
+ String sql = "SELECT * FROM (" + select + ") t LIMIT 1";
+ QueryResult query = queryExecutor.executeQuery(sql, ctx);
+ if (!query.error()) {
+ try {
+ int cols = query.columnCount();
+ row = new Row();
+ for (int i = 0; i < cols; i++) {
+ String name = query.metadata().columnName(i);
+ if (name.startsWith("t.")) {
+ name = name.substring(2);
+ }
+ row.addColumnDefinition(name, query.metadata().columnTypeName(i));
+ }
+ } catch (Exception e) {
+ exec.signal(e);
+ }
+ } else {
+ exec.signal(query.exception());
+ }
+ query.close();
+ } else {
+ QueryResult query = queryExecutor.executeQuery(select, ctx);
+ if (!query.error()) {
+ try {
+ Metadata rm = query.metadata();
+ int cols = rm.columnCount();
+ for (int i = 1; i <= cols; i++) {
+ String col = rm.columnName(i);
+ String typ = rm.columnTypeName(i);
+ if (row == null) {
+ row = new Row();
+ }
+ row.addColumnDefinition(col.toUpperCase(), typ);
+ }
+ } catch (Exception e) {
+ exec.signal(e);
+ }
+ }
+ query.close();
+ }
+ return row;
+ }
+
+ /**
+ * Read the column data from the database and cache it
+ */
+ Row readColumns(ParserRuleContext ctx, String conn, String table, HashMap<String, Row> map) {
+ Row row = null;
+ Conn.Type connType = exec.getConnectionType(conn);
+ if (connType == Conn.Type.HIVE) {
+ String sql = "DESCRIBE " + table;
+ QueryResult query = queryExecutor.executeQuery(sql, ctx);
+ if (!query.error()) {
+ try {
+ while (query.next()) {
+ String col = query.column(0, String.class);
+ String typ = query.column(1, String.class);
+ if (row == null) {
+ row = new Row();
+ }
+ // Hive DESCRIBE outputs "empty_string NULL" row before partition information
+ if (typ == null) {
+ break;
+ }
+ row.addColumnDefinition(col.toUpperCase(), typ);
+ }
+ map.put(table, row);
+ } catch (Exception e) {
+ exec.signal(e);
+ }
+ } else {
+ exec.signal(query.exception());
+ }
+ query.close();
+ } else {
+ QueryResult query = queryExecutor.executeQuery("SELECT * FROM " + table, ctx);
+ if (!query.error()) {
+ try {
+ Metadata rm = query.metadata();
+ int cols = query.columnCount();
+ for (int i = 1; i <= cols; i++) {
+ String col = rm.columnName(i);
+ String typ = rm.columnTypeName(i);
+ if (row == null) {
+ row = new Row();
+ }
+ row.addColumnDefinition(col.toUpperCase(), typ);
+ }
+ map.put(table, row);
+ } catch (Exception ignored) {
+ // ignored
+ }
+ }
+ query.close();
+ }
+ return row;
+ }
+
+ /**
+ * Normalize the identifier of a database object (e.g. convert "" and [] quoting to `` quoting)
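+ * e.g. [dbo].[Orders] becomes `Orders` (the dbo schema is dropped) and "Sales"."Orders" becomes `Sales`.`Orders`.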
+ */
+ public String normalizeObjectIdentifier(String name) {
+ ArrayList<String> parts = splitIdentifier(name);
+ if (parts != null) { // more than one part exists
+ StringBuilder norm = new StringBuilder();
+ int size = parts.size();
+ boolean appended = false;
+ for (int i = 0; i < size; i++) {
+ if (i == size - 2) { // schema name
+ String schema = getTargetSchemaName(parts.get(i));
+ if (schema != null) {
+ norm.append(schema);
+ appended = true;
+ }
+ } else {
+ norm.append(normalizeIdentifierPart(parts.get(i)));
+ appended = true;
+ }
+ if (i + 1 < parts.size() && appended) {
+ norm.append(".");
+ }
+ }
+ return norm.toString();
+ }
+ return normalizeIdentifierPart(name);
+ }
+
+ /**
+ * Get the schema name to be used in the final executed SQL
+ */
+ String getTargetSchemaName(String name) {
+ if (name.equalsIgnoreCase("dbo") || name.equalsIgnoreCase("[dbo]")) {
+ return null;
+ }
+ return normalizeIdentifierPart(name);
+ }
+
+ /**
+ * Normalize a single identifier part (e.g. convert "" and [] quoting to `` quoting)
+ */
+ public String normalizeIdentifierPart(String name) {
+ char start = name.charAt(0);
+ char end = name.charAt(name.length() - 1);
+ if ((start == '[' && end == ']') || (start == '"' && end == '"')) {
+ return '`' + name.substring(1, name.length() - 1) + '`';
+ }
+ return name;
+ }
+
+ /**
+ * Split qualified object to 2 parts: schema.tab.col -> schema.tab|col; tab.col -> tab|col
+ */
+ public ArrayList<String> splitIdentifierToTwoParts(String name) {
+ ArrayList<String> parts = splitIdentifier(name);
+ ArrayList<String> twoparts = null;
+ if (parts != null) {
+ StringBuilder id = new StringBuilder();
+ int i = 0;
+ for (; i < parts.size() - 1; i++) {
+ id.append(parts.get(i));
+ if (i + 1 < parts.size() - 1) {
+ id.append(".");
+ }
+ }
+ twoparts = new ArrayList<>();
+ twoparts.add(id.toString());
+ id.setLength(0);
+ id.append(parts.get(i));
+ twoparts.add(id.toString());
+ }
+ return twoparts;
+ }
+
+ /**
+ * Split identifier to parts (schema, table, column name, etc.)
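+ * e.g. "db.tab.col" yields [db, tab, col]; dots inside quoted parts such as `a.b`.col are preserved.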
+ *
+ * @return null if identifier contains single part
+ */
+ public ArrayList<String> splitIdentifier(String name) {
+ ArrayList<String> parts = null;
+ int start = 0;
+ for (int i = 0; i < name.length(); i++) {
+ char c = name.charAt(i);
+ char del = '\0';
+ if (c == '`' || c == '"') {
+ del = c;
+ } else if (c == '[') {
+ del = ']';
+ }
+ if (del != '\0') {
+ for (int j = i + 1; j < name.length(); j++) {
+ i++;
+ if (name.charAt(j) == del) {
+ break;
+ }
+ }
+ continue;
+ }
+ if (c == '.') {
+ if (parts == null) {
+ parts = new ArrayList<>();
+ }
+ parts.add(name.substring(start, i));
+ start = i + 1;
+ }
+ }
+ if (parts != null) {
+ parts.add(name.substring(start));
+ }
+ return parts;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Package.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Package.java
new file mode 100644
index 00000000000000..91811ce35bd1c3
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Package.java
@@ -0,0 +1,196 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Package.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.nereids.PLParser.Create_function_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_package_body_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_package_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_procedure_stmtContext;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParser.Package_body_itemContext;
+import org.apache.doris.nereids.PLParser.Package_spec_itemContext;
+import org.apache.doris.plsql.functions.BuiltinFunctions;
+import org.apache.doris.plsql.functions.InMemoryFunctionRegistry;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Program package
+ */
+public class Package {
+
+ private String name;
+ private List<Var> vars = new ArrayList<>();
+ private List<String> publicFuncs = new ArrayList<>();
+ private List<String> publicProcs = new ArrayList<>();
+
+ HashMap<String, Create_function_stmtContext> func = new HashMap<>();
+ HashMap<String, Create_procedure_stmtContext> proc = new HashMap<>();
+
+ boolean allMembersPublic = false;
+
+ Exec exec;
+ InMemoryFunctionRegistry function;
+ boolean trace = false;
+
+ Package(String name, Exec exec, BuiltinFunctions builtinFunctions) {
+ this.name = name;
+ this.exec = exec;
+ this.function = new InMemoryFunctionRegistry(exec, builtinFunctions);
+ this.trace = exec.getTrace();
+ }
+
+ /**
+ * Add a local variable
+ */
+ public void addVariable(Var var) {
+ vars.add(var);
+ }
+
+ /**
+ * Find the variable by name
+ */
+ public Var findVariable(String name) {
+ for (Var var : vars) {
+ if (name.equalsIgnoreCase(var.getName())) {
+ return var;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Create the package specification
+ */
+ public void createSpecification(Create_package_stmtContext ctx) {
+ int cnt = ctx.package_spec().package_spec_item().size();
+ for (int i = 0; i < cnt; i++) {
+ Package_spec_itemContext c = ctx.package_spec().package_spec_item(i);
+ if (c.declare_stmt_item() != null) {
+ visit(c);
+ } else if (c.FUNCTION() != null) {
+ publicFuncs.add(c.ident_pl().getText().toUpperCase());
+ } else if (c.PROC() != null || c.PROCEDURE() != null) {
+ publicProcs.add(c.ident_pl().getText().toUpperCase());
+ }
+ }
+ }
+
+ /**
+ * Create the package body
+ */
+ public void createBody(Create_package_body_stmtContext ctx) {
+ int cnt = ctx.package_body().package_body_item().size();
+ for (int i = 0; i < cnt; i++) {
+ Package_body_itemContext c = ctx.package_body().package_body_item(i);
+ if (c.declare_stmt_item() != null) {
+ visit(c);
+ } else if (c.create_function_stmt() != null) {
+ func.put(c.create_function_stmt().multipartIdentifier().getText().toUpperCase(),
+ c.create_function_stmt());
+ } else if (c.create_procedure_stmt() != null) {
+ proc.put(c.create_procedure_stmt().multipartIdentifier().getText().toUpperCase(),
+ c.create_procedure_stmt());
+ }
+ }
+ }
+
+ /**
+ * Execute function
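+ * Falls back to execProc when no function with the given name exists in this package.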
+ */
+ public boolean execFunc(String name, Expr_func_paramsContext ctx) {
+ Create_function_stmtContext f = func.get(name.toUpperCase());
+ if (f == null) {
+ return execProc(name, ctx, false /*trace error if not exists*/);
+ }
+ if (trace) {
+ trace(ctx, "EXEC PACKAGE FUNCTION " + this.name + "." + name);
+ }
+ ArrayList<Var> actualParams = function.getActualCallParameters(ctx);
+ exec.enterScope(Scope.Type.ROUTINE, this);
+ InMemoryFunctionRegistry.setCallParameters(name, ctx, actualParams, f.create_routine_params(), null, exec);
+ visit(f.single_block_stmt());
+ exec.leaveScope();
+ return true;
+ }
+
+ /**
+ * Execute procedure
+ */
+ public boolean execProc(String name, Expr_func_paramsContext ctx,
+ boolean traceNotExists) {
+ Create_procedure_stmtContext p = proc.get(name.toUpperCase());
+ if (p == null) {
+ if (trace && traceNotExists) {
+ trace(ctx, "Package procedure not found: " + this.name + "." + name);
+ }
+ return false;
+ }
+ if (trace) {
+ trace(ctx, "EXEC PACKAGE PROCEDURE " + this.name + "." + name);
+ }
+ ArrayList<Var> actualParams = function.getActualCallParameters(ctx);
+ HashMap<String, Var> out = new HashMap<>();
+ exec.enterScope(Scope.Type.ROUTINE, this);
+ exec.callStackPush(name);
+ if (p.declare_block_inplace() != null) {
+ visit(p.declare_block_inplace());
+ }
+ if (p.create_routine_params() != null) {
+ InMemoryFunctionRegistry.setCallParameters(name, ctx, actualParams, p.create_routine_params(), out, exec);
+ }
+ visit(p.procedure_block());
+ exec.callStackPop();
+ exec.leaveScope();
+ for (Map.Entry<String, Var> i : out.entrySet()) { // Set OUT parameters
+ exec.setVariable(i.getKey(), i.getValue());
+ }
+ return true;
+ }
+
+ /**
+ * Set whether all members are public (used when the package specification is missing)
+ */
+ void setAllMembersPublic(boolean value) {
+ allMembersPublic = value;
+ }
+
+ /**
+ * Execute rules
+ */
+ Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ /**
+ * Trace information
+ */
+ public void trace(ParserRuleContext ctx, String message) {
+ if (trace) {
+ exec.trace(ctx, message);
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Plsql.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Plsql.java
new file mode 100644
index 00000000000000..06c6058a3981b9
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Plsql.java
@@ -0,0 +1,27 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Hplsql.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+public class Plsql {
+ public static void main(String[] args) throws Exception {
+ System.exit(new Exec().run(args));
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Query.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Query.java
new file mode 100644
index 00000000000000..875431e6d45dc2
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Query.java
@@ -0,0 +1,144 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+public class Query {
+ String sql;
+ Connection conn;
+ Statement stmt;
+ PreparedStatement pstmt;
+ ResultSet rs;
+ Exception exception;
+
+ Query() {
+ }
+
+ public Query(String sql) {
+ this.sql = sql;
+ }
+
+ /**
+ * Set query objects
+ */
+ public void set(Connection conn, Statement stmt, ResultSet rs) {
+ this.conn = conn;
+ this.stmt = stmt;
+ this.rs = rs;
+ }
+
+ public void set(Connection conn, PreparedStatement pstmt) {
+ this.conn = conn;
+ this.pstmt = pstmt;
+ }
+
+ /**
+ * Close statement results
+ */
+ public void closeStatement() {
+ try {
+ if (rs != null) {
+ rs.close();
+ rs = null;
+ }
+ if (stmt != null) {
+ stmt.close();
+ stmt = null;
+ }
+ if (pstmt != null) {
+ pstmt.close();
+ pstmt = null;
+ }
+ } catch (SQLException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Set SQL statement
+ */
+ public void setSql(String sql) {
+ this.sql = sql;
+ }
+
+ /**
+ * Set an execution error
+ */
+ public void setError(Exception e) {
+ exception = e;
+ }
+
+ /**
+ * Print error stack trace
+ */
+ public void printStackTrace() {
+ if (exception != null) {
+ exception.printStackTrace();
+ }
+ }
+
+ /**
+ * Get the result set object
+ */
+ public ResultSet getResultSet() {
+ return rs;
+ }
+
+ /**
+ * Get the prepared statement object
+ */
+ public PreparedStatement getPreparedStatement() {
+ return pstmt;
+ }
+
+ /**
+ * Get the connection object
+ */
+ public Connection getConnection() {
+ return conn;
+ }
+
+ /**
+ * Return error information
+ */
+ public boolean error() {
+ return exception != null;
+ }
+
+ public String errorText() {
+ if (exception != null) {
+ if (exception instanceof ClassNotFoundException) {
+ return "ClassNotFoundException: " + exception.getMessage();
+ }
+ return exception.getMessage();
+ }
+ return "";
+ }
+
+ public Exception getException() {
+ return exception;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Row.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Row.java
new file mode 100644
index 00000000000000..a5de30fbf1b5df
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Row.java
@@ -0,0 +1,104 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Row.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Table row (all columns)
+ */
+public class Row {
+ private final org.apache.doris.plsql.ColumnMap colMap
+ = new org.apache.doris.plsql.ColumnMap();
+
+ public Row() {
+ }
+
+ Row(Row row) {
+ for (Column c : row.colMap.columns()) {
+ addColumnDefinition(c.getName(), c.getType());
+ }
+ }
+
+ /**
+ * Add a column with specified data type
+ */
+ public void addColumnDefinition(String name, String type) {
+ colMap.add(new Column(name, type, null));
+ }
+
+ public void addColumn(String name, String type, Var value) {
+ Column column = new Column(name, type, value);
+ colMap.add(column);
+ }
+
+ /**
+ * Get the data type by column name
+ */
+ public String getType(String name) {
+ Column column = colMap.get(name);
+ return column != null ? column.getType() : null;
+ }
+
+ /**
+ * Get value by index
+ */
+ public Var getValue(int i) {
+ return colMap.at(i).getValue();
+ }
+
+ /**
+ * Get value by column name
+ */
+ Var getValue(String name) {
+ Column column = colMap.get(name);
+ return column != null ? column.getValue() : null;
+ }
+
+ /**
+ * Get columns
+ */
+ List<Column> getColumns() {
+ return colMap.columns();
+ }
+
+ /**
+ * Get column by index
+ */
+ public Column getColumn(int i) {
+ return colMap.at(i);
+ }
+
+ /**
+ * Get the number of columns
+ */
+ int size() {
+ return colMap.size();
+ }
+
+ public List columnDefinitions() {
+ return getColumns().stream().map(Column::definition).collect(Collectors.toList());
+ }
+}
+
+
+
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Scope.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Scope.java
new file mode 100644
index 00000000000000..61e51e4faa3837
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Scope.java
@@ -0,0 +1,80 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Scope.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * PL/SQL block scope
+ */
+public class Scope {
+
+ public enum Type {
+ GLOBAL, BEGIN_END, LOOP, HANDLER, PACKAGE, ROUTINE
+ }
+
+ Map<String, Var> vars = new HashMap<>();
+ ArrayList<Handler> handlers = new ArrayList<>();
+ Scope parent;
+ Type type;
+ Package pack;
+
+ Scope(Type type) {
+ this.parent = null;
+ this.type = type;
+ this.pack = null;
+ }
+
+ Scope(Scope parent, Type type) {
+ this.parent = parent;
+ this.type = type;
+ this.pack = null;
+ }
+
+ Scope(Scope parent, Type type, Package pack) {
+ this.parent = parent;
+ this.type = type;
+ this.pack = pack;
+ }
+
+ /**
+ * Add a local variable
+ */
+ void addVariable(Var var) {
+ vars.put(var.name.toUpperCase(), var);
+ }
+
+ /**
+ * Add a condition handler
+ */
+ void addHandler(Handler handler) {
+ handlers.add(handler);
+ }
+
+ /**
+ * Get the parent scope
+ */
+ Scope getParent() {
+ return parent;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Signal.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Signal.java
new file mode 100644
index 00000000000000..9e8de04d9019e7
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Signal.java
@@ -0,0 +1,54 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Signal.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+/**
+ * Signals and exceptions
+ */
+public class Signal {
+ public enum Type {
+ LEAVE_LOOP, LEAVE_ROUTINE, LEAVE_PROGRAM, SQLEXCEPTION, NOTFOUND, TOO_MANY_ROWS, UNSUPPORTED_OPERATION,
+ USERDEFINED, VALIDATION
+ }
+
+ Type type;
+ String value = "";
+ Exception exception = null;
+
+ Signal(Type type, String value) {
+ this.type = type;
+ this.value = value;
+ this.exception = null;
+ }
+
+ Signal(Type type, String value, Exception exception) {
+ this.type = type;
+ this.value = value;
+ this.exception = exception;
+ }
+
+ /**
+ * Get the signal value (message text)
+ */
+ public String getValue() {
+ return value;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/SqlCodes.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/SqlCodes.java
new file mode 100644
index 00000000000000..c236855d39b4f6
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/SqlCodes.java
@@ -0,0 +1,28 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/SqlCodes.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+public class SqlCodes {
+ public static final int NO_DATA_FOUND = 100;
+ public static final int TOO_MANY_ROWS = -1422;
+ public static final int SUCCESS = 0;
+ public static final int ERROR = -1;
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Stmt.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Stmt.java
new file mode 100644
index 00000000000000..d7268a000f4601
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Stmt.java
@@ -0,0 +1,1026 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.PLParser.Allocate_cursor_stmtContext;
+import org.apache.doris.nereids.PLParser.Assignment_stmt_select_itemContext;
+import org.apache.doris.nereids.PLParser.Associate_locator_stmtContext;
+import org.apache.doris.nereids.PLParser.Break_stmtContext;
+import org.apache.doris.nereids.PLParser.Close_stmtContext;
+import org.apache.doris.nereids.PLParser.Declare_cursor_itemContext;
+import org.apache.doris.nereids.PLParser.Doris_statementContext;
+import org.apache.doris.nereids.PLParser.Exec_stmtContext;
+import org.apache.doris.nereids.PLParser.Exit_stmtContext;
+import org.apache.doris.nereids.PLParser.Fetch_stmtContext;
+import org.apache.doris.nereids.PLParser.For_cursor_stmtContext;
+import org.apache.doris.nereids.PLParser.For_range_stmtContext;
+import org.apache.doris.nereids.PLParser.Get_diag_stmt_exception_itemContext;
+import org.apache.doris.nereids.PLParser.Get_diag_stmt_rowcount_itemContext;
+import org.apache.doris.nereids.PLParser.If_bteq_stmtContext;
+import org.apache.doris.nereids.PLParser.If_plsql_stmtContext;
+import org.apache.doris.nereids.PLParser.If_tsql_stmtContext;
+import org.apache.doris.nereids.PLParser.Include_stmtContext;
+import org.apache.doris.nereids.PLParser.Leave_stmtContext;
+import org.apache.doris.nereids.PLParser.Open_stmtContext;
+import org.apache.doris.nereids.PLParser.Print_stmtContext;
+import org.apache.doris.nereids.PLParser.Quit_stmtContext;
+import org.apache.doris.nereids.PLParser.Resignal_stmtContext;
+import org.apache.doris.nereids.PLParser.Return_stmtContext;
+import org.apache.doris.nereids.PLParser.Set_current_schema_optionContext;
+import org.apache.doris.nereids.PLParser.Signal_stmtContext;
+import org.apache.doris.nereids.PLParser.Unconditional_loop_stmtContext;
+import org.apache.doris.nereids.PLParser.Values_into_stmtContext;
+import org.apache.doris.nereids.PLParser.While_stmtContext;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
+import org.apache.doris.plsql.Var.Type;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.plsql.exception.UndefinedIdentException;
+import org.apache.doris.plsql.executor.Metadata;
+import org.apache.doris.plsql.executor.PlsqlResult;
+import org.apache.doris.plsql.executor.QueryExecutor;
+import org.apache.doris.plsql.executor.QueryResult;
+import org.apache.doris.plsql.executor.ResultListener;
+import org.apache.doris.plsql.objects.Table;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Stack;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+/**
+ * PL/SQL statements execution
+ */
+public class Stmt {
+ Exec exec = null;
+ Stack<Var> stack = null;
+ Conf conf;
+ Meta meta;
+ Console console;
+
+ boolean trace = false;
+ ResultListener resultListener = ResultListener.NONE;
+ private QueryExecutor queryExecutor;
+
+ Stmt(Exec e, QueryExecutor queryExecutor) {
+ exec = e;
+ stack = exec.getStack();
+ conf = exec.getConf();
+ meta = exec.getMeta();
+ trace = exec.getTrace();
+ console = exec.console;
+ this.queryExecutor = queryExecutor;
+ }
+
+ public void setResultListener(ResultListener resultListener) {
+ this.resultListener = resultListener;
+ }
+
+ /**
+ * Execute a SQL statement (SELECT ... INTO, a standalone statement, or a scalar subquery)
+ */
+ public Integer statement(ParserRuleContext ctx) {
+ trace(ctx, "SELECT");
+ if (exec.getOffline()) {
+ trace(ctx, "Not executed - offline mode set");
+ return 0;
+ }
+
+ QueryResult query = queryExecutor.executeQuery(exec.logicalPlanBuilder.getOriginSql(ctx), ctx);
+ resultListener.setProcessor(query.processor());
+
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ trace(ctx, "statement completed successfully");
+ exec.setSqlSuccess();
+ try {
+ int intoCount = getIntoCount(ctx);
+ if (intoCount > 0) {
+ // TODO SELECT BULK COLLECT INTO statement executed
+ trace(ctx, "SELECT INTO statement executed");
+ if (query.next()) {
+ for (int i = 0; i < intoCount; i++) {
+ populateVariable(ctx, query, i);
+ }
+ exec.incRowCount();
+ exec.setSqlSuccess();
+ if (query.next()) {
+ exec.setSqlCode(SqlCodes.TOO_MANY_ROWS);
+ exec.signal(Signal.Type.TOO_MANY_ROWS);
+ }
+ } else {
+ exec.setSqlCode(SqlCodes.NO_DATA_FOUND);
+ exec.signal(Signal.Type.NOTFOUND);
+ }
+ } else if (ctx instanceof Doris_statementContext) { // only from visitStatement
+ // Print all results for standalone Statement.
+ if (query.metadata() != null) {
+ resultListener.onMetadata(query.metadata());
+ int cols = query.columnCount();
+ if (trace) {
+ trace(ctx, "Standalone statement executed: " + cols + " columns in the result set");
+ }
+ while (query.next()) {
+ if (resultListener instanceof PlsqlResult) { // if running from the mysql client
+ resultListener.onMysqlRow(query.mysqlRow());
+ } else { // if running from plsql.sh
+ Object[] row = new Object[cols]; // TODO if there is a large amount of data?
+ for (int i = 0; i < cols; i++) {
+ row[i] = query.column(i, Object.class);
+ if (i > 0) {
+ console.print("\t");
+ }
+ console.print(String.valueOf(row[i]));
+ }
+ console.printLine("");
+ exec.incRowCount();
+
+ resultListener.onRow(row);
+ }
+ }
+ resultListener.onEof();
+ }
+ } else { // Scalar subquery, such as visitExpr
+ trace(ctx, "Scalar subquery executed, first row and first column fetched only");
+ if (query.next()) {
+ exec.stackPush(new Var().setValue(query, 1));
+ exec.setSqlSuccess();
+ } else {
+ evalNull();
+ exec.setSqlCode(SqlCodes.NO_DATA_FOUND);
+ }
+ }
+ } catch (QueryException | AnalysisException e) {
+ if (query.error()) {
+ exec.signal(query);
+ } else {
+ exec.signal(e);
+ }
+ query.close();
+ return 1;
+ }
+ query.close();
+ return 0;
+ }
+
+ /**
+ * Get number of elements in INTO or var=col assignment clause
+ */
+ int getIntoCount(ParserRuleContext ctx) {
+ // TODO
+ return 0;
+ }
+
+ /**
+ * Get variable name assigned in INTO or var=col clause by index
+ */
+ String getIntoVariable(ParserRuleContext ctx, int idx) {
+ // TODO
+ return null;
+ }
+
+ private int getIntoTableIndex(ParserRuleContext ctx, int idx) {
+ // TODO
+ return 0;
+ }
+
+ private void populateVariable(ParserRuleContext ctx, QueryResult query, int columnIndex) throws AnalysisException {
+ String intoName = getIntoVariable(ctx, columnIndex);
+ Var var = exec.findVariable(intoName);
+ if (var != null) {
+ if (var.type == Var.Type.PL_OBJECT && var.value instanceof Table) {
+ Table table = (Table) var.value;
+ table.populate(query, getIntoTableIndex(ctx, columnIndex), columnIndex);
+ } else if (var.type == Var.Type.ROW) {
+ var.setRowValues(query);
+ } else {
+ var.setValue(query, columnIndex);
+ }
+ exec.trace(ctx, var, query.metadata(), columnIndex);
+ } else {
+ throw new UndefinedIdentException(ctx, intoName);
+ }
+ }
+
+ /**
+ * ALLOCATE CURSOR statement
+ */
+ public Integer allocateCursor(Allocate_cursor_stmtContext ctx) {
+ trace(ctx, "ALLOCATE CURSOR");
+ String name = ctx.ident_pl(0).getText();
+ Var cur = null;
+ if (ctx.PROCEDURE() != null) {
+ cur = exec.consumeReturnCursor(ctx.ident_pl(1).getText());
+ } else if (ctx.RESULT() != null) {
+ cur = exec.findVariable(ctx.ident_pl(1).getText());
+ if (cur != null && cur.type != Type.RS_LOCATOR) {
+ cur = null;
+ }
+ }
+ if (cur == null) {
+ trace(ctx, "Cursor for procedure not found: " + name);
+ exec.signal(Signal.Type.SQLEXCEPTION);
+ return -1;
+ }
+ exec.addVariable(new Var(name, Type.CURSOR, cur.value));
+ return 0;
+ }
+
+ /**
+ * ASSOCIATE LOCATOR statement
+ */
+ public Integer associateLocator(Associate_locator_stmtContext ctx) {
+ trace(ctx, "ASSOCIATE LOCATOR");
+ int cnt = ctx.ident_pl().size();
+ if (cnt < 2) {
+ return -1;
+ }
+ String procedure = ctx.ident_pl(cnt - 1).getText();
+ for (int i = 0; i < cnt - 1; i++) {
+ Var cur = exec.consumeReturnCursor(procedure);
+ if (cur != null) {
+ String name = ctx.ident_pl(i).getText();
+ Var loc = exec.findVariable(name);
+ if (loc == null) {
+ loc = new Var(name, Type.RS_LOCATOR, cur.value);
+ exec.addVariable(loc);
+ } else {
+ loc.setValue(cur.value);
+ }
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * DECLARE cursor statement
+ */
+ public Integer declareCursor(Declare_cursor_itemContext ctx) {
+ String name = ctx.ident_pl().getText();
+ if (trace) {
+ trace(ctx, "DECLARE CURSOR " + name);
+ }
+ Cursor cursor = new Cursor(null);
+ if (ctx.expr() != null) {
+ cursor.setExprCtx(ctx.expr());
+ } else if (ctx.query() != null) {
+ cursor.setSelectCtx(ctx.query());
+ }
+ if (ctx.cursor_with_return() != null) {
+ cursor.setWithReturn(true);
+ }
+ Var var = new Var(name, Type.CURSOR, cursor);
+ exec.addVariable(var);
+ return 0;
+ }
+
+ /**
+ * OPEN cursor statement
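+ * Supports both OPEN cur FOR query-or-expression (the SQL text is taken from the parse context)
+ * and opening a cursor previously defined with DECLARE ... CURSOR.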
+ */
+ public Integer open(Open_stmtContext ctx) {
+ trace(ctx, "OPEN");
+ Cursor cursor = null;
+ Var var = null;
+ String cursorName = ctx.ident_pl().getText();
+ String sql = null;
+ if (ctx.FOR() != null) { // SELECT statement or dynamic SQL
+ sql = ctx.expr() != null ? exec.logicalPlanBuilder.getOriginSql(ctx.expr())
+ : exec.logicalPlanBuilder.getOriginSql(ctx.query());
+ cursor = new Cursor(sql);
+ var = exec.findCursor(cursorName); // Can be a ref cursor variable
+ if (var == null) {
+ var = new Var(cursorName, Type.CURSOR, cursor);
+ exec.addVariable(var);
+ } else {
+ var.setValue(cursor);
+ }
+ } else { // Declared cursor
+ var = exec.findVariable(cursorName);
+ if (var != null && var.type == Type.CURSOR) {
+ cursor = (Cursor) var.value;
+ if (cursor.getSqlExpr() != null) {
+ cursor.setSql(exec.logicalPlanBuilder.getOriginSql(cursor.getSqlExpr()));
+ } else if (cursor.getSqlSelect() != null) {
+ cursor.setSql(exec.logicalPlanBuilder.getOriginSql(cursor.getSqlSelect()));
+ }
+ }
+ }
+ if (cursor != null) {
+ if (trace) {
+ trace(ctx, cursorName + ": " + sql);
+ }
+ cursor.open(queryExecutor, ctx);
+ QueryResult queryResult = cursor.getQueryResult();
+ if (queryResult.error()) {
+ exec.signal(queryResult);
+ return 1;
+ } else if (!exec.getOffline()) {
+ exec.setSqlCode(SqlCodes.SUCCESS);
+ }
+ if (cursor.isWithReturn()) {
+ exec.addReturnCursor(var);
+ }
+ } else {
+ trace(ctx, "Cursor not found: " + cursorName);
+ exec.setSqlCode(SqlCodes.ERROR);
+ exec.signal(Signal.Type.SQLEXCEPTION);
+ return 1;
+ }
+ return 0;
+ }
+
+ /**
+ * FETCH cursor statement
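+ * Fetches the next row, or a batch of rows when BULK COLLECT is used, into the listed variables.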
+ */
+ public Integer fetch(Fetch_stmtContext ctx) {
+ trace(ctx, "FETCH");
+ String name = ctx.ident_pl(0).getText();
+ Var varCursor = exec.findCursor(name);
+ if (varCursor == null) {
+ trace(ctx, "Cursor not found: " + name);
+ exec.setSqlCode(SqlCodes.ERROR);
+ exec.signal(Signal.Type.SQLEXCEPTION);
+ return 1;
+ } else if (varCursor.value == null) {
+ trace(ctx, "Cursor not open: " + name);
+ exec.setSqlCode(SqlCodes.ERROR);
+ exec.signal(Signal.Type.SQLEXCEPTION);
+ return 1;
+ } else if (exec.getOffline()) {
+ exec.setSqlCode(SqlCodes.NO_DATA_FOUND);
+ exec.signal(Signal.Type.NOTFOUND);
+ return 0;
+ }
+ // Assign values from the row to local variables
+ try {
+ Cursor cursor = (Cursor) varCursor.value;
+ int cols = ctx.ident_pl().size() - 1;
+ QueryResult queryResult = cursor.getQueryResult();
+
+ if (ctx.bulk_collect_clause() != null) {
+ long limit = ctx.fetch_limit() != null ? evalPop(ctx.fetch_limit().expr()).longValue() : -1;
+ long rowIndex = 1;
+ List<Table> tables = exec.intoTables(ctx, intoVariableNames(ctx, cols));
+ tables.forEach(Table::removeAll);
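+ // BULK COLLECT: append every fetched row to the target collections, honoring the optional LIMIT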
+ while (queryResult.next()) {
+ cursor.setFetch(true);
+ for (int i = 0; i < cols; i++) {
+ Table table = tables.get(i);
+ table.populate(queryResult, rowIndex, i);
+ }
+ rowIndex++;
+ if (limit != -1 && rowIndex - 1 >= limit) {
+ break;
+ }
+ }
+ } else {
+ if (queryResult.next()) {
+ cursor.setFetch(true);
+ for (int i = 0; i < cols; i++) {
+ Var var = exec.findVariable(ctx.ident_pl(i + 1).getText());
+ if (var != null) { // Variables must be defined in advance by DECLARE etc.
+ if (var.type != Var.Type.ROW) {
+ var.setValue(queryResult, i); // Set value of each column into variable
+ } else {
+ var.setRowValues(queryResult);
+ }
+ if (trace) {
+ trace(ctx, var, queryResult.metadata(), i);
+ }
+ } else if (trace) {
+ trace(ctx, "Variable not found: " + ctx.ident_pl(i + 1).getText());
+ }
+ }
+ exec.incRowCount();
+ exec.setSqlSuccess();
+ } else {
+ cursor.setFetch(false);
+ exec.setSqlCode(SqlCodes.NO_DATA_FOUND); // Check when exiting cursor
+ }
+ }
+ } catch (QueryException | AnalysisException e) {
+ exec.setSqlCode(e);
+ exec.signal(Signal.Type.SQLEXCEPTION, e.getMessage(), e);
+ }
+ return 0;
+ }
+
+ private List<String> intoVariableNames(Fetch_stmtContext ctx, int count) {
+ return IntStream.range(0, count).mapToObj(i -> ctx.ident_pl(i + 1).getText()).collect(Collectors.toList());
+ }
+
+ /**
+ * CLOSE cursor statement
+ */
+ public Integer close(Close_stmtContext ctx) {
+ trace(ctx, "CLOSE");
+ String name = ctx.IDENTIFIER().toString();
+ Var var = exec.findVariable(name);
+ if (var != null && var.type == Type.CURSOR) {
+ ((Cursor) var.value).close();
+ exec.setSqlCode(SqlCodes.SUCCESS);
+ } else if (trace) {
+ trace(ctx, "Cursor not found: " + name);
+ }
+ return 0;
+ }
+
+ /**
+ * INCLUDE statement
+ */
+ public Integer include(Include_stmtContext ctx) {
+ String file;
+ if (ctx.file_name() != null) {
+ file = ctx.file_name().getText();
+ } else {
+ file = evalPop(ctx.expr()).toString();
+ }
+ trace(ctx, "INCLUDE " + file);
+ exec.includeFile(file, true);
+ return 0;
+ }
+
+ /**
+ * IF statement (PL/SQL syntax)
+ */
+ public Integer ifPlsql(If_plsql_stmtContext ctx) {
+ boolean trueExecuted = false;
+ trace(ctx, "IF");
+ if (evalPop(ctx.bool_expr()).isTrue()) {
+ trace(ctx, "IF TRUE executed");
+ visit(ctx.block());
+ trueExecuted = true;
+ } else if (ctx.elseif_block() != null) {
+ int cnt = ctx.elseif_block().size();
+ for (int i = 0; i < cnt; i++) {
+ if (evalPop(ctx.elseif_block(i).bool_expr()).isTrue()) {
+ trace(ctx, "ELSE IF executed");
+ visit(ctx.elseif_block(i).block());
+ trueExecuted = true;
+ break;
+ }
+ }
+ }
+ if (!trueExecuted && ctx.else_block() != null) {
+ trace(ctx, "ELSE executed");
+ visit(ctx.else_block());
+ }
+ return 0;
+ }
+
+ /**
+ * IF statement (Transact-SQL syntax)
+ */
+ public Integer ifTsql(If_tsql_stmtContext ctx) {
+ trace(ctx, "IF");
+ visit(ctx.bool_expr());
+ if (exec.stackPop().isTrue()) {
+ trace(ctx, "IF TRUE executed");
+ visit(ctx.single_block_stmt(0));
+ } else if (ctx.ELSE() != null) {
+ trace(ctx, "ELSE executed");
+ visit(ctx.single_block_stmt(1));
+ }
+ return 0;
+ }
+
+ /**
+ * IF statement (BTEQ syntax)
+ */
+ public Integer ifBteq(If_bteq_stmtContext ctx) {
+ trace(ctx, "IF");
+ visit(ctx.bool_expr());
+ if (exec.stackPop().isTrue()) {
+ trace(ctx, "IF TRUE executed");
+ visit(ctx.single_block_stmt());
+ }
+ return 0;
+ }
+
+ /**
+ * Assignment from SELECT statement
+ */
+ public Integer assignFromSelect(Assignment_stmt_select_itemContext ctx) {
+ String sql = evalPop(ctx.query()).toString();
+ if (trace) {
+ trace(ctx, sql);
+ }
+ QueryResult query = queryExecutor.executeQuery(sql, ctx);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlSuccess();
+ try {
+ int cnt = ctx.ident_pl().size();
+ if (query.next()) {
+ for (int i = 0; i < cnt; i++) {
+ Var var = exec.findVariable(ctx.ident_pl(i).getText());
+ if (var != null) {
+ var.setValue(query, i);
+ if (trace) {
+ trace(ctx, "COLUMN: " + query.metadata().columnName(i) + ", " + query.metadata()
+ .columnTypeName(i));
+ trace(ctx, "SET " + var.getName() + " = " + var.toString());
+ }
+ } else if (trace) {
+ trace(ctx, "Variable not found: " + ctx.ident_pl(i).getText());
+ }
+ }
+ exec.incRowCount();
+ exec.setSqlSuccess();
+ } else {
+ exec.setSqlCode(SqlCodes.NO_DATA_FOUND);
+ exec.signal(Signal.Type.NOTFOUND);
+ }
+ } catch (QueryException | AnalysisException e) {
+ exec.signal(query);
+ return 1;
+ } finally {
+ query.close();
+ }
+ return 0;
+ }
+
+ /**
+ * GET DIAGNOSTICS EXCEPTION statement
+ */
+ public Integer getDiagnosticsException(Get_diag_stmt_exception_itemContext ctx) {
+ trace(ctx, "GET DIAGNOSTICS EXCEPTION");
+ Signal signal = exec.signalPeek();
+ if (signal == null || signal.type != Signal.Type.SQLEXCEPTION) {
+ signal = exec.currentSignal;
+ }
+ if (signal != null) {
+ exec.setVariable(ctx.qident().getText(), signal.getValue());
+ }
+ return 0;
+ }
+
+ /**
+ * GET DIAGNOSTICS ROW_COUNT statement
+ */
+ public Integer getDiagnosticsRowCount(Get_diag_stmt_rowcount_itemContext ctx) {
+ trace(ctx, "GET DIAGNOSTICS ROW_COUNT");
+ exec.setVariable(ctx.qident().getText(), exec.getRowCount());
+ return 0;
+ }
+
+ public Integer use(ParserRuleContext ctx, String sql) {
+ if (trace) {
+ trace(ctx, "SQL statement: " + sql);
+ }
+ QueryResult query = queryExecutor.executeQuery(sql, ctx);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ exec.setSqlCode(SqlCodes.SUCCESS);
+ query.close();
+ return 0;
+ }
+
+ /**
+ * VALUES statement
+ */
+ public Integer values(Values_into_stmtContext ctx) {
+ trace(ctx, "VALUES statement");
+ int cnt = ctx.ident_pl().size(); // Number of variables and assignment expressions
+ int ecnt = ctx.expr().size();
+ for (int i = 0; i < cnt; i++) {
+ String name = ctx.ident_pl(i).getText();
+ if (i < ecnt) {
+ visit(ctx.expr(i));
+ Var var = exec.setVariable(name);
+ if (trace) {
+ trace(ctx, "SET " + name + " = " + var.toString());
+ }
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * WHILE statement
+ */
+ public Integer while_(While_stmtContext ctx) {
+ trace(ctx, "WHILE - ENTERED");
+ String label = exec.labelPop();
+ while (true) {
+ if (evalPop(ctx.bool_expr()).isTrue()) {
+ exec.enterScope(Scope.Type.LOOP);
+ visit(ctx.block());
+ exec.leaveScope();
+ if (canContinue(label)) {
+ continue;
+ }
+ }
+ break;
+ }
+ trace(ctx, "WHILE - LEFT");
+ return 0;
+ }
+
+ /**
+ * FOR cursor statement
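+ * Iterates over the query result, exposing each row as a ROW variable named after the cursor.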
+ */
+ public Integer forCursor(For_cursor_stmtContext ctx) {
+ trace(ctx, "FOR CURSOR - ENTERED");
+ exec.enterScope(Scope.Type.LOOP);
+ String cursor = ctx.IDENTIFIER().getText();
+ String sql = exec.logicalPlanBuilder.getOriginSql(ctx.query());
+ trace(ctx, sql);
+ QueryResult query = exec.queryExecutor.executeQuery(sql, ctx);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ trace(ctx, "SELECT completed successfully");
+ exec.setSqlSuccess();
+ try {
+ int cols = query.columnCount();
+ Row row = new Row();
+ for (int i = 0; i < cols; i++) {
+ row.addColumnDefinition(query.metadata().columnName(i), query.metadata().columnTypeName(i));
+ }
+ Var var = new Var(cursor, row);
+ exec.addVariable(var);
+ while (query.next()) {
+ var.setRowValues(query);
+ if (trace) {
+ trace(ctx, var, query.metadata(), 0);
+ }
+ visit(ctx.block());
+ exec.incRowCount();
+ }
+ } catch (QueryException | AnalysisException e) {
+ exec.signal(e);
+ query.close();
+ return 1;
+ }
+ exec.setSqlSuccess();
+ query.close();
+ exec.leaveScope();
+ trace(ctx, "FOR CURSOR - LEFT");
+ return 0;
+ }
+
+ /**
+ * FOR (integer range) statement
+ */
+ public Integer forRange(For_range_stmtContext ctx) {
+ trace(ctx, "FOR RANGE - ENTERED");
+ int start = evalPop(ctx.expr(0)).intValue();
+ int end = evalPop(ctx.expr(1)).intValue();
+ int step = evalPop(ctx.expr(2), 1L).intValue();
+ exec.enterScope(Scope.Type.LOOP);
+ Var index = setIndex(start, end, ctx);
+ exec.addVariable(index);
+ for (int i = start; i <= end; i += step) {
+ visit(ctx.block());
+ updateIndex(step, index, ctx);
+ }
+ exec.leaveScope();
+ trace(ctx, "FOR RANGE - LEFT");
+ return 0;
+ }
+
+ public Integer unconditionalLoop(Unconditional_loop_stmtContext ctx) {
+ trace(ctx, "UNCONDITIONAL LOOP - ENTERED");
+ String label = exec.labelPop();
+ do {
+ exec.enterScope(Scope.Type.LOOP);
+ visit(ctx.block());
+ exec.leaveScope();
+ } while (canContinue(label));
+ trace(ctx, "UNCONDITIONAL LOOP - LEFT");
+ return 0;
+ }
+
+ /**
+ * Set the initial index value for the FOR statement
+ */
+ private Var setIndex(int start, int end, For_range_stmtContext ctx) {
+
+ if (ctx.REVERSE() == null) {
+ return new Var(ctx.IDENTIFIER().getText(), Long.valueOf(start));
+ } else {
+ return new Var(ctx.IDENTIFIER().getText(), Long.valueOf(end));
+ }
+ }
+
+ /**
+ * Update the index value for the FOR statement (increment, or decrement for REVERSE)
+ */
+ private void updateIndex(int step, Var index, For_range_stmtContext ctx) {
+
+ if (ctx.REVERSE() == null) {
+ index.increment(step);
+ } else {
+ index.decrement(step);
+ }
+ }
+
+ /**
+ * EXEC, EXECUTE and EXECUTE IMMEDIATE statements to execute dynamic SQL or a stored procedure
+ */
+ public Integer exec(Exec_stmtContext ctx) {
+ if (execProc(ctx)) {
+ return 0;
+ }
+ trace(ctx, "EXECUTE");
+ Var vsql = evalPop(ctx.expr());
+ String sql = vsql.toString();
+ if (trace) {
+ trace(ctx, "SQL statement: " + sql);
+ }
+ QueryResult query = queryExecutor.executeQuery(sql, ctx);
+ if (query.error()) {
+ exec.signal(query);
+ return 1;
+ }
+ try {
+ if (ctx.INTO() != null) {
+ int cols = ctx.IDENTIFIER().size();
+ if (query.next()) {
+ for (int i = 0; i < cols; i++) {
+ Var var = exec.findVariable(ctx.IDENTIFIER(i).getText());
+ if (var != null) {
+ if (var.type != Type.ROW) {
+ var.setValue(query, i);
+ } else {
+ var.setRowValues(query);
+ }
+ if (trace) {
+ trace(ctx, var, query.metadata(), i);
+ }
+ } else if (trace) {
+ trace(ctx, "Variable not found: " + ctx.IDENTIFIER(i).getText());
+ }
+ }
+ exec.setSqlCode(SqlCodes.SUCCESS);
+ }
+ } else { // Print the results
+ int cols = query.columnCount();
+ while (query.next()) {
+ for (int i = 0; i < cols; i++) {
+ if (i > 1) {
+ console.print("\t");
+ }
+ console.print(query.column(i, String.class));
+ }
+ console.printLine("");
+ }
+ }
+ } catch (QueryException | AnalysisException e) {
+ exec.signal(query);
+ query.close();
+ return 1;
+ }
+ query.close();
+ return 0;
+ }
+
+ /**
+ * EXEC to execute a stored procedure
+ */
+ public Boolean execProc(Exec_stmtContext ctx) {
+ String name = evalPop(ctx.expr()).toString().toUpperCase();
+ return exec.functions.exec(new FuncNameInfo(name), ctx.expr_func_params());
+ }
+
+ /**
+ * EXIT statement (leave the specified loop with a condition)
+ */
+ public Integer exit(Exit_stmtContext ctx) {
+ trace(ctx, "EXIT");
+ String label = "";
+ if (ctx.IDENTIFIER() != null) {
+ label = ctx.IDENTIFIER().toString();
+ }
+ if (ctx.WHEN() != null) {
+ if (evalPop(ctx.bool_expr()).isTrue()) {
+ leaveLoop(label);
+ }
+ } else {
+ leaveLoop(label);
+ }
+ return 0;
+ }
+
+ /**
+ * BREAK statement (leave the innermost loop unconditionally)
+ */
+ public Integer break_(Break_stmtContext ctx) {
+ trace(ctx, "BREAK");
+ leaveLoop("");
+ return 0;
+ }
+
+ /**
+ * LEAVE statement (leave the specified loop unconditionally)
+ */
+ public Integer leave(Leave_stmtContext ctx) {
+ trace(ctx, "LEAVE");
+ String label = "";
+ if (ctx.IDENTIFIER() != null) {
+ label = ctx.IDENTIFIER().toString();
+ }
+ leaveLoop(label);
+ return 0;
+ }
+
+ /**
+ * Leave the specified or innermost loop unconditionally
+ */
+ public void leaveLoop(String value) {
+ exec.signal(Signal.Type.LEAVE_LOOP, value);
+ }
+
+ /**
+ * PRINT Statement
+ */
+ public Integer print(Print_stmtContext ctx) {
+ trace(ctx, "PRINT");
+ if (ctx.expr() != null) {
+ console.printLine(evalPop(ctx.expr()).toString());
+ }
+ return 0;
+ }
+
+ /**
+ * QUIT Statement
+ */
+ public Integer quit(Quit_stmtContext ctx) {
+ trace(ctx, "QUIT");
+ String rc = null;
+ if (ctx.expr() != null) {
+ rc = evalPop(ctx.expr()).toString();
+ }
+ exec.signal(Signal.Type.LEAVE_PROGRAM, rc);
+ return 0;
+ }
+
+ /**
+ * SET current schema
+ */
+ public Integer setCurrentSchema(Set_current_schema_optionContext ctx) {
+ trace(ctx, "SET CURRENT SCHEMA");
+ return use(ctx, "USE " + meta.normalizeIdentifierPart(evalPop(ctx.expr()).toString()));
+ }
+
+ /**
+ * SIGNAL statement
+ */
+ public Integer signal(Signal_stmtContext ctx) {
+ trace(ctx, "SIGNAL");
+ Signal signal = new Signal(Signal.Type.USERDEFINED, ctx.ident_pl().getText());
+ exec.signal(signal);
+ return 0;
+ }
+
+ /**
+ * RESIGNAL statement
+ */
+ public Integer resignal(Resignal_stmtContext ctx) {
+ trace(ctx, "RESIGNAL");
+ if (ctx.SQLSTATE() != null) {
+ String sqlstate = evalPop(ctx.expr(0)).toString();
+ String text = "";
+ if (ctx.MESSAGE_TEXT() != null) {
+ text = evalPop(ctx.expr(1)).toString();
+ }
+ SQLException exception = new SQLException(text, sqlstate, -1);
+ Signal signal = new Signal(Signal.Type.SQLEXCEPTION, text, exception);
+ exec.setSqlCode(exception);
+ exec.resignal(signal);
+ } else {
+ exec.resignal();
+ }
+ return 0;
+ }
+
+ /**
+ * RETURN statement
+ */
+ public Integer return_(Return_stmtContext ctx) {
+ trace(ctx, "RETURN");
+ if (ctx.expr() != null) {
+ eval(ctx.expr());
+ }
+ exec.signal(Signal.Type.LEAVE_ROUTINE);
+ return 0;
+ }
+
+ /**
+ * Check whether a loop may run another iteration: returns false if an exception
+ * was raised or an EXIT/LEAVE signal targets this loop (or carries no label).
+ */
+ boolean canContinue(String label) {
+ Signal signal = exec.signalPeek();
+ if (signal != null && signal.type == Signal.Type.SQLEXCEPTION) {
+ return false;
+ }
+ if (signal != null && signal.type == Signal.Type.LEAVE_LOOP) {
+ if (signal.value == null || signal.value.isEmpty()
+ || (label != null && label.equalsIgnoreCase(signal.value))) {
+ // The signal is addressed to this loop (or carries no label), so consume it before leaving
+ exec.signalPop();
+ }
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Evaluate the expression and push the value to the stack
+ */
+ void eval(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ }
+
+ /**
+ * Evaluate the expression to specified String value
+ */
+ void evalString(String string) {
+ exec.stackPush(new Var(string));
+ }
+
+ void evalString(StringBuilder string) {
+ evalString(string.toString());
+ }
+
+ /**
+ * Evaluate the expression to NULL
+ */
+ void evalNull() {
+ exec.stackPush(Var.Null);
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ Var evalPop(ParserRuleContext ctx) {
+ visit(ctx);
+ if (!exec.stack.isEmpty()) {
+ return exec.stackPop();
+ }
+ return Var.Empty;
+ }
+
+ Var evalPop(ParserRuleContext ctx, long def) {
+ if (ctx != null) {
+ exec.visit(ctx);
+ return exec.stackPop();
+ }
+ return new Var(def);
+ }
+
+ /**
+ * Execute rules
+ */
+ Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ /**
+ * Execute children rules
+ */
+ Integer visitChildren(ParserRuleContext ctx) {
+ return exec.visitChildren(ctx);
+ }
+
+ /**
+ * Trace information
+ */
+ void trace(ParserRuleContext ctx, String message) {
+ exec.trace(ctx, message);
+ }
+
+ void trace(ParserRuleContext ctx, Var var, Metadata metadata, int idx) {
+ exec.trace(ctx, var, metadata, idx);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/StreamGobbler.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/StreamGobbler.java
new file mode 100644
index 00000000000000..1fbb4c3115a557
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/StreamGobbler.java
@@ -0,0 +1,55 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/StreamGobbler.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+/**
+ * Read a stream from an external process
+ */
+public class StreamGobbler extends Thread {
+ private final Console console;
+ private final InputStream is;
+
+ StreamGobbler(InputStream is, Console console) {
+ this.is = is;
+ this.console = console;
+ }
+
+ public void run() {
+ try {
+ InputStreamReader isr = new InputStreamReader(is);
+ BufferedReader br = new BufferedReader(isr);
+ while (true) {
+ String line = br.readLine();
+ if (line == null) {
+ break;
+ }
+ console.printLine(line);
+ }
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/SyntaxErrorReporter.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/SyntaxErrorReporter.java
new file mode 100644
index 00000000000000..2e6534daeaff99
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/SyntaxErrorReporter.java
@@ -0,0 +1,39 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/SyntaxErrorReporter.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.antlr.v4.runtime.BaseErrorListener;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.Recognizer;
+
+public class SyntaxErrorReporter extends BaseErrorListener {
+ private final Console console;
+
+ public SyntaxErrorReporter(Console console) {
+ this.console = console;
+ }
+
+ @Override
+ public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine,
+ String msg, RecognitionException e) {
+ console.printError("Syntax error at line " + line + ":" + charPositionInLine + " " + msg);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Timer.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Timer.java
new file mode 100644
index 00000000000000..01e7f111a48f01
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Timer.java
@@ -0,0 +1,61 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Timer.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
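+/**
+ * Simple wall-clock timer: start() records the start time, stop() returns the
+ * elapsed milliseconds, and format() renders the elapsed time for display.
+ */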
+public class Timer {
+ long start = 0;
+ long stop = 0;
+ long elapsed = 0;
+
+ /**
+ * Start the timer
+ */
+ public long start() {
+ start = System.currentTimeMillis();
+ return start;
+ }
+
+ /**
+ * Get intermediate timer value
+ */
+ public long current() {
+ return System.currentTimeMillis();
+ }
+
+ /**
+ * Stop the timer and return elapsed time
+ */
+ public long stop() {
+ stop = System.currentTimeMillis();
+ elapsed = stop - start;
+ return elapsed;
+ }
+
+ /**
+ * Format the elapsed time
+ */
+ public String format() {
+ if (elapsed < 1000) {
+ return String.valueOf(elapsed) + " ms";
+ }
+ return String.format("%.2f", ((float) elapsed) / 1000) + " sec";
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Utils.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Utils.java
new file mode 100644
index 00000000000000..e6694f669374e6
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Utils.java
@@ -0,0 +1,330 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Utils.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+
+public class Utils {
+
+ /**
+ * Unquote string and remove escape characters inside the script
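+ * For example, 'ab''c' and 'ab\'c' both become ab'c.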
+ */
+ public static String unquoteString(String s) {
+ if (s == null) {
+ return null;
+ }
+
+ int len = s.length();
+ StringBuilder s2 = new StringBuilder(len);
+
+ for (int i = 0; i < len; i++) {
+ char ch = s.charAt(i);
+ char ch2 = (i < len - 1) ? s.charAt(i + 1) : 0;
+
+ if ((i == 0 || i == len - 1) && (ch == '\'' || ch == '"')) {
+ continue;
+ } else
+ // \' and '' escape sequences
+ if ((ch == '\\' && ch2 == '\'') || (ch == '\'' && ch2 == '\'')) {
+ continue;
+ }
+
+ s2.append(ch);
+ }
+
+ return s2.toString();
+ }
+
+ /**
+ * Quote string and escape characters - ab'c -> 'ab''c'
+ */
+ public static String quoteString(String s) {
+ if (s == null) {
+ return null;
+ }
+ int len = s.length();
+ StringBuilder s2 = new StringBuilder(len + 2).append('\'');
+
+ for (int i = 0; i < len; i++) {
+ char ch = s.charAt(i);
+ s2.append(ch);
+ if (ch == '\'') {
+ s2.append(ch);
+ }
+ }
+ s2.append('\'');
+ return s2.toString();
+ }
+
+ /**
+ * Merge quoted strings: 'a' 'b' -> 'ab'; 'a''b' 'c' -> 'a''bc'
+ */
+ public static String mergeQuotedStrings(String s1, String s2) {
+ if (s1 == null || s2 == null) {
+ return null;
+ }
+
+ int len1 = s1.length();
+ int len2 = s2.length();
+
+ if (len1 == 0 || len2 == 0) {
+ return s1;
+ }
+
+ return s1.substring(0, len1 - 1) + s2.substring(1);
+ }
+
+ /**
+ * Convert String to Date
+ */
+ public static Date toDate(String s) {
+ int len = s.length();
+ if (len >= 10) {
+ int c4 = s.charAt(4);
+ int c7 = s.charAt(7);
+ // YYYY-MM-DD
+ if (c4 == '-' && c7 == '-') {
+ return Date.valueOf(s.substring(0, 10));
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Convert String to Timestamp
+ */
+ public static Timestamp toTimestamp(String s) {
+ int len = s.length();
+ if (len >= 10) {
+ int c4 = s.charAt(4);
+ int c7 = s.charAt(7);
+ // YYYY-MM-DD
+ if (c4 == '-' && c7 == '-') {
+ // Convert DB2 syntax: YYYY-MM-DD-HH.MI.SS.FFF
+ if (len > 19) {
+ if (s.charAt(10) == '-') {
+ String s2 = s.substring(0, 10) + ' ' + s.substring(11, 13) + ':' + s.substring(14, 16) + ':'
+ + s.substring(17);
+ return Timestamp.valueOf(s2);
+ }
+ } else if (len == 10) {
+ s += " 00:00:00.000";
+ }
+ return Timestamp.valueOf(s);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Compare two String values and return min or max
+ */
+ public static String minMaxString(String s1, String s2, boolean max) {
+ if (s1 == null) {
+ return s2;
+ } else if (s2 == null) {
+ return s1;
+ }
+ int cmp = s1.compareTo(s2);
+ if ((max && cmp < 0) || (!max && cmp > 0)) {
+ return s2;
+ }
+ return s1;
+ }
+
+ /**
+ * Compare two Int values and return min or max
+ */
+ public static Long minMaxInt(Long i1, String s, boolean max) {
+ Long i2 = null;
+ try {
+ i2 = Long.parseLong(s);
+ } catch (NumberFormatException ignored) {
+ // ignored
+ }
+ if (i1 == null) {
+ return i2;
+ } else if (i2 == null) {
+ return i1;
+ }
+ if ((max && i1.longValue() < i2.longValue()) || (!max && i1.longValue() > i2.longValue())) {
+ return i2;
+ }
+ return i1;
+ }
+
+ /**
+ * Compare two Date values and return min or max
+ */
+ public static Date minMaxDate(Date d1, String s, boolean max) {
+ Date d2 = Utils.toDate(s);
+ if (d1 == null) {
+ return d2;
+ } else if (d2 == null) {
+ return d1;
+ }
+ if ((max && d1.before(d2)) || (!max && d1.after(d2))) {
+ return d2;
+ }
+ return d1;
+ }
+
+ /**
+ * Convert String array to a string with the specified delimiter
+ */
+ public static String toString(String[] a, char del) {
+ StringBuilder s = new StringBuilder();
+ for (int i = 0; i < a.length; i++) {
+ if (i > 0) {
+ s.append(del);
+ }
+ s.append(a[i]);
+ }
+ return s.toString();
+ }
+
+ /**
+ * Convert SQL datetime format string to Java SimpleDateFormat
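+ * For example, YYYY-mm-DD HH24:MI:SS becomes yyyy-MM-dd HH:mm:ss.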
+ */
+ public static String convertSqlDatetimeFormat(String in) {
+ StringBuilder out = new StringBuilder();
+ int len = in.length();
+ int i = 0;
+ while (i < len) {
+ if (i + 4 <= len && in.substring(i, i + 4).compareTo("YYYY") == 0) {
+ out.append("yyyy");
+ i += 4;
+ } else if (i + 2 <= len && in.substring(i, i + 2).compareTo("mm") == 0) {
+ out.append("MM");
+ i += 2;
+ } else if (i + 2 <= len && in.substring(i, i + 2).compareTo("DD") == 0) {
+ out.append("dd");
+ i += 2;
+ } else if (i + 4 <= len && in.substring(i, i + 4).compareToIgnoreCase("HH24") == 0) {
+ out.append("HH");
+ i += 4;
+ } else if (i + 2 <= len && in.substring(i, i + 2).compareToIgnoreCase("MI") == 0) {
+ out.append("mm");
+ i += 2;
+ } else if (i + 2 <= len && in.substring(i, i + 2).compareTo("SS") == 0) {
+ out.append("ss");
+ i += 2;
+ } else {
+ out.append(in.charAt(i));
+ i++;
+ }
+ }
+ return out.toString();
+ }
+
+ /**
+ * Get the executable directory
+ */
+ public static String getExecDir() {
+ String dir = Plsql.class.getProtectionDomain().getCodeSource().getLocation().getPath();
+ if (dir.endsWith(".jar")) {
+ dir = dir.substring(0, dir.lastIndexOf("/") + 1);
+ }
+ return dir;
+ }
+
+ /**
+ * Format size value specified in bytes
+ */
+ public static String formatSizeInBytes(long bytes, String postfix) {
+ String out;
+ if (bytes == 1) {
+ out = bytes + " byte";
+ } else if (bytes < 1024) {
+ out = bytes + " bytes";
+ } else if (bytes < 1024 * 1024) {
+ out = String.format("%.1f", ((float) bytes) / 1024) + " KB";
+ } else if (bytes < 1024 * 1024 * 1024) {
+ out = String.format("%.1f", ((float) bytes) / (1024 * 1024)) + " MB";
+ } else {
+ out = String.format("%.1f", ((float) bytes) / (1024 * 1024 * 1024)) + " GB";
+ }
+ if (postfix != null && !postfix.isEmpty()) {
+ out += postfix;
+ }
+ return out;
+ }
+
+ public static String formatSizeInBytes(long bytes) {
+ return Utils.formatSizeInBytes(bytes, null);
+ }
+
+ /**
+ * Format elapsed time
+ */
+ public static String formatTime(long msElapsed) {
+ if (msElapsed < 60000) {
+ return msElapsed / 1000 + " sec";
+ } else if (msElapsed < 60000 * 60) {
+ return msElapsed / 60000 + " min " + (msElapsed % 60000) / 1000 + " sec";
+ }
+ return "";
+ }
+
+ /**
+ * Format bytes per second rate
+ */
+ public static String formatBytesPerSec(long bytes, long msElapsed) {
+ if (msElapsed < 30) {
+ return "n/a";
+ }
+ float bytesPerSec = ((float) bytes) / msElapsed * 1000;
+ return Utils.formatSizeInBytes((long) bytesPerSec, "/sec");
+ }
+
+ /**
+ * Format percentage
+ */
+ public static String formatPercent(long current, long all) {
+ return String.format("%.1f", ((float) current) / all * 100) + "%";
+ }
+
+ /**
+ * Format count
+ */
+ public static String formatCnt(long value, String suffix) {
+ if (value == 1) {
+ return value + " " + suffix;
+ }
+ return value + " " + suffix + "s";
+ }
+
+ public static String formatCnt(long value, String suffix, String suffix2) {
+ if (value == 1) {
+ return value + " " + suffix;
+ }
+ return value + " " + suffix2;
+ }
+
+ /**
+ * Note: this stub resolves a name conflict with org.antlr.v4.runtime.misc.Utils.join,
+ * which is referenced by the ANTLR generated sources.
+ */
+ static <T> String join(T[] array, String separator) {
+ return org.antlr.v4.runtime.misc.Utils.join(array, separator);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/Var.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/Var.java
new file mode 100644
index 00000000000000..879191f8fa8e12
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/Var.java
@@ -0,0 +1,634 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
+// and modified by Doris
+
+package org.apache.doris.plsql;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.trees.expressions.literal.Literal;
+import org.apache.doris.plsql.exception.TypeException;
+import org.apache.doris.plsql.executor.QueryResult;
+
+import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+
+/**
+ * Variable or the result of expression
+ */
+public class Var {
+ // Data types
+ public enum Type {
+ BOOL, CURSOR, DATE, DECIMAL, DERIVED_TYPE, DERIVED_ROWTYPE, DOUBLE, FILE, IDENT, BIGINT, INTERVAL, ROW,
+ RS_LOCATOR, STRING, STRINGLIST, TIMESTAMP, NULL, PL_OBJECT
+ }
+
+ public static final String DERIVED_TYPE = "DERIVED%TYPE";
+ public static final String DERIVED_ROWTYPE = "DERIVED%ROWTYPE";
+ public static Var Empty = new Var();
+ public static Var Null = new Var(Type.NULL);
+
+ public String name;
+ public Type type;
+ public Object value;
+
+ int len;
+ int scale;
+
+ boolean constant = false;
+
+ public Var() {
+ type = Type.NULL;
+ }
+
+ public Var(Var var) {
+ name = var.name;
+ type = var.type;
+ value = var.value;
+ len = var.len;
+ scale = var.scale;
+ }
+
+ public Var(Long value) {
+ this.type = Type.BIGINT;
+ this.value = value;
+ }
+
+ public Var(BigDecimal value) {
+ this.type = Type.DECIMAL;
+ this.value = value;
+ }
+
+ public Var(String name, Long value) {
+ this.type = Type.BIGINT;
+ this.name = name;
+ this.value = value;
+ }
+
+ public Var(String value) {
+ this.type = Type.STRING;
+ this.value = value;
+ }
+
+ public Var(Double value) {
+ this.type = Type.DOUBLE;
+ this.value = value;
+ }
+
+ public Var(Date value) {
+ this.type = Type.DATE;
+ this.value = value;
+ }
+
+ public Var(Timestamp value, int scale) {
+ this.type = Type.TIMESTAMP;
+ this.value = value;
+ this.scale = scale;
+ }
+
+ public Var(Interval value) {
+ this.type = Type.INTERVAL;
+ this.value = value;
+ }
+
+ public Var(ArrayList<String> value) {
+ this.type = Type.STRINGLIST;
+ this.value = value;
+ }
+
+ public Var(Boolean b) {
+ type = Type.BOOL;
+ value = b;
+ }
+
+ public Var(String name, Row row) {
+ this.name = name;
+ this.type = Type.ROW;
+ this.value = new Row(row);
+ }
+
+ public Var(Type type, String name) {
+ this.type = type;
+ this.name = name;
+ }
+
+ public Var(Type type, Object value) {
+ this.type = type;
+ this.value = value;
+ }
+
+ public Var(String name, Type type, Object value) {
+ this.name = name;
+ this.type = type;
+ this.value = value;
+ }
+
+ public Var(Type type) {
+ this.type = type;
+ }
+
+ public Var(String name, String type, Integer len, Integer scale, Var def) {
+ this.name = name;
+ setType(type);
+ if (len != null) {
+ this.len = len;
+ }
+ if (scale != null) {
+ this.scale = scale;
+ }
+ if (def != null) {
+ cast(def);
+ }
+ }
+
+ public Var(String name, String type, String len, String scale, Var def) {
+ this(name, type, len != null ? Integer.parseInt(len) : null, scale != null ? Integer.parseInt(scale) : null,
+ def);
+ }
+
+ /**
+ * Cast a new value to the variable
+ */
+ public Var cast(Var val) {
+ try {
+ if (constant) {
+ return this;
+ } else if (val == null || val.value == null) {
+ value = null;
+ } else if (val.value instanceof Literal) { // At first, ignore type
+ value = val.value;
+ } else if (type == Type.DERIVED_TYPE) {
+ type = val.type;
+ value = val.value;
+ } else if (type == val.type && type == Type.STRING) {
+ cast((String) val.value);
+ } else if (type == val.type) {
+ value = val.value;
+ } else if (type == Type.STRING) {
+ cast(val.toString());
+ } else if (type == Type.BIGINT) {
+ if (val.type == Type.STRING) {
+ value = Long.parseLong((String) val.value);
+ } else if (val.type == Type.DECIMAL) {
+ value = ((BigDecimal) val.value).longValue();
+ }
+ } else if (type == Type.DECIMAL) {
+ if (val.type == Type.STRING) {
+ value = new BigDecimal((String) val.value);
+ } else if (val.type == Type.BIGINT) {
+ value = BigDecimal.valueOf(val.longValue());
+ } else if (val.type == Type.DOUBLE) {
+ value = BigDecimal.valueOf(val.doubleValue());
+ }
+ } else if (type == Type.DOUBLE) {
+ if (val.type == Type.STRING) {
+ value = Double.valueOf((String) val.value);
+ } else if (val.type == Type.BIGINT || val.type == Type.DECIMAL) {
+ value = Double.valueOf(val.doubleValue());
+ }
+ } else if (type == Type.DATE) {
+ value = org.apache.doris.plsql.Utils.toDate(val.toString());
+ } else if (type == Type.TIMESTAMP) {
+ value = org.apache.doris.plsql.Utils.toTimestamp(val.toString());
+ }
+ } catch (NumberFormatException e) {
+ throw new TypeException(null, type, val.type, val.value);
+ }
+ return this;
+ }
+
+ public Literal toLiteral() {
+ if (value instanceof Literal) {
+ return (Literal) value;
+ } else {
+ return Literal.of(value);
+ }
+ }
+
+ /**
+ * Cast a new string value to the variable
+ */
+ public Var cast(String val) {
+ if (!constant && type == Type.STRING) {
+ if (len != 0) {
+ int l = val.length();
+ if (l > len) {
+ value = val.substring(0, len);
+ return this;
+ }
+ }
+ value = val;
+ }
+ return this;
+ }
+
+ /**
+ * Set the new value
+ */
+ public void setValue(String str) {
+ if (!constant && type == Type.STRING) {
+ value = str;
+ }
+ }
+
+ public Var setValue(Long val) {
+ if (!constant && type == Type.BIGINT) {
+ value = val;
+ }
+ return this;
+ }
+
+ public Var setValue(Boolean val) {
+ if (!constant && type == Type.BOOL) {
+ value = val;
+ }
+ return this;
+ }
+
+ public void setValue(Object value) {
+ if (!constant) {
+ this.value = value;
+ }
+ }
+
+ public Var setValue(QueryResult queryResult, int idx) throws AnalysisException {
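+ // jdbcType is unavailable (Integer.MIN_VALUE) for non-JDBC results; take the column value as-is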
+ if (queryResult.jdbcType(idx) == Integer.MIN_VALUE) {
+ value = queryResult.column(idx);
+ } else { // JdbcQueryExecutor
+ int type = queryResult.jdbcType(idx);
+ if (type == java.sql.Types.CHAR || type == java.sql.Types.VARCHAR) {
+ cast(new Var(queryResult.column(idx, String.class)));
+ } else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT
+ || type == java.sql.Types.SMALLINT || type == java.sql.Types.TINYINT) {
+ cast(new Var(queryResult.column(idx, Long.class)));
+ } else if (type == java.sql.Types.DECIMAL || type == java.sql.Types.NUMERIC) {
+ cast(new Var(queryResult.column(idx, BigDecimal.class)));
+ } else if (type == java.sql.Types.FLOAT || type == java.sql.Types.DOUBLE) {
+ cast(new Var(queryResult.column(idx, Double.class)));
+ }
+ }
+ return this;
+ }
+
+ public Var setRowValues(QueryResult queryResult) throws AnalysisException {
+ Row row = (Row) this.value;
+ int idx = 0;
+ for (Column column : row.getColumns()) {
+ Var var = new Var(column.getName(), column.getType(), (Integer) null, null, null);
+ var.setValue(queryResult, idx);
+ column.setValue(var);
+ idx++;
+ }
+ return this;
+ }
+
+ /**
+ * Set the data type from string representation
+ */
+ public void setType(String type) {
+ this.type = defineType(type);
+ }
+
+ /**
+ * Set the data type from JDBC type code
+ */
+ void setType(int type) {
+ this.type = defineType(type);
+ }
+
+ /**
+ * Set the variable as constant
+ */
+ void setConstant(boolean constant) {
+ this.constant = constant;
+ }
+
+ /**
+ * Define the data type from string representation
+ * from hive type to plsql var type
+ */
+ public static Type defineType(String type) {
+ if (type == null) {
+ return Type.NULL;
+ } else if (type.equalsIgnoreCase("INT") || type.equalsIgnoreCase("INTEGER") || type.equalsIgnoreCase("BIGINT")
+ || type.equalsIgnoreCase("SMALLINT") || type.equalsIgnoreCase("TINYINT")
+ || type.equalsIgnoreCase("BINARY_INTEGER") || type.equalsIgnoreCase("PLS_INTEGER")
+ || type.equalsIgnoreCase("SIMPLE_INTEGER") || type.equalsIgnoreCase("INT2")
+ || type.equalsIgnoreCase("INT4") || type.equalsIgnoreCase("INT8")) {
+ return Type.BIGINT;
+ } else if (type.equalsIgnoreCase("CHAR") || type.equalsIgnoreCase("VARCHAR") || type.equalsIgnoreCase(
+ "VARCHAR2")
+ || type.equalsIgnoreCase("STRING") || type.equalsIgnoreCase("XML")
+ || type.equalsIgnoreCase("CHARACTER")) {
+ return Type.STRING;
+ } else if (type.equalsIgnoreCase("DEC") || type.equalsIgnoreCase("DECIMAL") || type.equalsIgnoreCase("NUMERIC")
+ ||
+ type.equalsIgnoreCase("NUMBER")) {
+ return Type.DECIMAL;
+ } else if (type.equalsIgnoreCase("REAL") || type.equalsIgnoreCase("FLOAT") || type.toUpperCase()
+ .startsWith("DOUBLE") || type.equalsIgnoreCase("BINARY_FLOAT")
+ || type.toUpperCase().startsWith("BINARY_DOUBLE") || type.equalsIgnoreCase("SIMPLE_FLOAT")
+ || type.toUpperCase().startsWith("SIMPLE_DOUBLE")) {
+ return Type.DOUBLE;
+ } else if (type.equalsIgnoreCase("DATE")) {
+ return Type.DATE;
+ } else if (type.equalsIgnoreCase("TIMESTAMP")) {
+ return Type.TIMESTAMP;
+ } else if (type.equalsIgnoreCase("BOOL") || type.equalsIgnoreCase("BOOLEAN")) {
+ return Type.BOOL;
+ } else if (type.equalsIgnoreCase("SYS_REFCURSOR")) {
+ return Type.CURSOR;
+ } else if (type.equalsIgnoreCase("UTL_FILE.FILE_TYPE")) {
+ return Type.FILE;
+ } else if (type.toUpperCase().startsWith("RESULT_SET_LOCATOR")) {
+ return Type.RS_LOCATOR;
+ } else if (type.equalsIgnoreCase(Var.DERIVED_TYPE)) {
+ return Type.DERIVED_TYPE;
+ } else if (type.equalsIgnoreCase(Type.PL_OBJECT.name())) {
+ return Type.PL_OBJECT;
+ } else if (type.equalsIgnoreCase(Type.ROW.name())) {
+ return Type.ROW;
+ }
+ return Type.NULL;
+ }
+
+ /**
+ * Define the data type from JDBC type code
+ */
+ public static Type defineType(int type) {
+ if (type == java.sql.Types.CHAR || type == java.sql.Types.VARCHAR) {
+ return Type.STRING;
+ } else if (type == java.sql.Types.INTEGER || type == java.sql.Types.BIGINT) {
+ return Type.BIGINT;
+ }
+ return Type.NULL;
+ }
+
+ /**
+ * Remove value
+ */
+ public void removeValue() {
+ type = Type.NULL;
+ name = null;
+ value = null;
+ len = 0;
+ scale = 0;
+ }
+
+ /**
+ * Compare values
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ Var var = (Var) obj;
+ if (this == var) {
+ return true;
+ } else if (var.value == null || this.value == null) {
+ return false;
+ }
+ if (type == Type.BIGINT) {
+ if (var.type == Type.BIGINT && ((Long) value).longValue() == ((Long) var.value).longValue()) {
+ return true;
+ } else if (var.type == Type.DECIMAL) {
+ return equals((BigDecimal) var.value, (Long) value);
+ }
+ } else if (type == Type.STRING && var.type == Type.STRING && value.equals(var.value)) {
+ return true;
+ } else if (type == Type.DECIMAL && var.type == Type.DECIMAL
+ && ((BigDecimal) value).compareTo((BigDecimal) var.value) == 0) {
+ return true;
+ } else if (type == Type.DOUBLE) {
+ if (var.type == Type.DOUBLE && ((Double) value).compareTo((Double) var.value) == 0) {
+ return true;
+ } else if (var.type == Type.DECIMAL
+ && ((Double) value).compareTo(((BigDecimal) var.value).doubleValue()) == 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Check if variables of different data types are equal
+ */
+ public boolean equals(BigDecimal d, Long i) {
+ return d.compareTo(new BigDecimal(i)) == 0;
+ }
+
+ /**
+ * Compare values
+ */
+ public int compareTo(Var v) {
+ if (this == v) {
+ return 0;
+ } else if (v == null) {
+ return -1;
+ } else if (type == Type.BIGINT && v.type == Type.BIGINT) {
+ return ((Long) value).compareTo((Long) v.value);
+ } else if (type == Type.DOUBLE && v.type == Type.DECIMAL) {
+ return (new BigDecimal((double) value)).compareTo((BigDecimal) v.value);
+ } else if (type == Type.STRING && v.type == Type.STRING) {
+ return ((String) value).compareTo((String) v.value);
+ }
+ return -1;
+ }
+
+ /**
+ * Calculate difference between values in percent
+ */
+ public BigDecimal percentDiff(Var var) {
+ BigDecimal d1 = new Var(Var.Type.DECIMAL).cast(this).decimalValue();
+ BigDecimal d2 = new Var(Var.Type.DECIMAL).cast(var).decimalValue();
+ if (d1 != null && d2 != null) {
+ if (d1.compareTo(BigDecimal.ZERO) != 0) {
+ return d1.subtract(d2).abs().multiply(new BigDecimal(100)).divide(d1, 2, RoundingMode.HALF_UP);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Increment an integer value
+ */
+ public Var increment(long i) {
+ if (type == Type.BIGINT) {
+ value = Long.valueOf(((Long) value).longValue() + i);
+ }
+ return this;
+ }
+
+ /**
+ * Decrement an integer value
+ */
+ public Var decrement(long i) {
+ if (type == Type.BIGINT) {
+ value = Long.valueOf(((Long) value).longValue() - i);
+ }
+ return this;
+ }
+
+ /**
+ * Return an integer value
+ */
+ public int intValue() {
+ if (type == Type.BIGINT) {
+ return ((Long) value).intValue();
+ } else if (type == Type.STRING) {
+ return Integer.parseInt((String) value);
+ }
+ throw new NumberFormatException();
+ }
+
+ /**
+ * Return a long integer value
+ */
+ public long longValue() {
+ if (type == Type.BIGINT) {
+ return ((Long) value).longValue();
+ }
+ throw new NumberFormatException();
+ }
+
+ /**
+ * Return a decimal value
+ */
+ public BigDecimal decimalValue() {
+ if (type == Type.DECIMAL) {
+ return (BigDecimal) value;
+ }
+ throw new NumberFormatException();
+ }
+
+ /**
+ * Return a double value
+ */
+ public double doubleValue() {
+ if (type == Type.DOUBLE) {
+ return ((Double) value).doubleValue();
+ } else if (type == Type.BIGINT) {
+ return ((Long) value).doubleValue();
+ } else if (type == Type.DECIMAL) {
+ return ((BigDecimal) value).doubleValue();
+ }
+ throw new NumberFormatException();
+ }
+
+ /**
+ * Return true/false for BOOL type
+ */
+ public boolean isTrue() {
+ if (type == Type.BOOL && value != null) {
+ return ((Boolean) value).booleanValue();
+ }
+ return false;
+ }
+
+ /**
+ * Negate the value
+ */
+ public void negate() {
+ if (value == null) {
+ return;
+ }
+ if (type == Type.BOOL) {
+ boolean v = ((Boolean) value).booleanValue();
+ value = Boolean.valueOf(!v);
+ } else if (type == Type.DECIMAL) {
+ BigDecimal v = (BigDecimal) value;
+ value = v.negate();
+ } else if (type == Type.DOUBLE) {
+ Double v = (Double) value;
+ value = -v;
+ } else if (type == Type.BIGINT) {
+ Long v = (Long) value;
+ value = -v;
+ } else {
+ throw new NumberFormatException("invalid type " + type);
+ }
+ }
+
+ /**
+ * Check if the variable contains NULL
+ */
+ public boolean isNull() {
+ return type == Type.NULL || value == null;
+ }
+
+ /**
+ * Convert value to String
+ */
+ @Override
+ public String toString() {
+ if (value instanceof Literal) {
+ return value.toString();
+ } else if (type == Type.IDENT) {
+ return name;
+ } else if (value == null) {
+ return null;
+ } else if (type == Type.BIGINT) {
+ return ((Long) value).toString();
+ } else if (type == Type.STRING) {
+ return (String) value;
+ } else if (type == Type.DATE) {
+ return ((Date) value).toString();
+ } else if (type == Type.TIMESTAMP) {
+ int len = 19;
+ String t = ((Timestamp) value).toString(); // .0 returned if the fractional part not set
+ if (scale > 0) {
+ len += scale + 1;
+ }
+ if (t.length() > len) {
+ t = t.substring(0, len);
+ }
+ return t;
+ }
+ return value.toString();
+ }
+
+ /**
+ * Convert value to SQL string - string literals are quoted and escaped, ab'c -> 'ab''c'
+ */
+ public String toSqlString() {
+ if (value == null) {
+ return "NULL";
+ } else if (type == Type.STRING) {
+ return org.apache.doris.plsql.Utils.quoteString((String) value);
+ }
+ return toString();
+ }
+
+ /**
+ * Set variable name
+ */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * Get variable name
+ */
+ public String getName() {
+ return name;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/ArityException.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/ArityException.java
new file mode 100644
index 00000000000000..b8e5a964cf3d83
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/ArityException.java
@@ -0,0 +1,34 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ArityException.java
+// and modified by Doris
+
+package org.apache.doris.plsql.exception;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class ArityException extends PlValidationException {
+ public ArityException(ParserRuleContext ctx, String procName, int formalCount, int actualCount) {
+ super(ctx, "wrong number of arguments in call to '" + procName
+ + "'. Expected " + formalCount + " got " + actualCount + ".");
+ }
+
+ public ArityException(ParserRuleContext ctx, String message) {
+ super(ctx, message);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/NoSuchPlMethodException.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/NoSuchPlMethodException.java
new file mode 100644
index 00000000000000..838ea2d4124a67
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/NoSuchPlMethodException.java
@@ -0,0 +1,29 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/NoSuchHplMethodException.java
+// and modified by Doris
+
+package org.apache.doris.plsql.exception;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class NoSuchPlMethodException extends PlValidationException {
+ public NoSuchPlMethodException(ParserRuleContext ctx, String message) {
+ super(ctx, message);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/PlValidationException.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/PlValidationException.java
new file mode 100644
index 00000000000000..59ecf4a1efad97
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/PlValidationException.java
@@ -0,0 +1,36 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/HplValidationException.java
+// and modified by Doris
+
+package org.apache.doris.plsql.exception;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class PlValidationException extends RuntimeException {
+ private final ParserRuleContext ctx;
+
+ public PlValidationException(ParserRuleContext ctx, String message) {
+ super(message);
+ this.ctx = ctx;
+ }
+
+ public ParserRuleContext getCtx() {
+ return ctx;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/QueryException.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/QueryException.java
new file mode 100644
index 00000000000000..39822b84be9374
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/QueryException.java
@@ -0,0 +1,41 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/QueryException.java
+// and modified by Doris
+
+package org.apache.doris.plsql.exception;
+
+import java.sql.SQLException;
+
+public class QueryException extends RuntimeException {
+ public QueryException(Throwable cause) {
+ super(cause);
+ }
+
+ public int getErrorCode() {
+ return getCause() instanceof SQLException
+ ? ((SQLException) getCause()).getErrorCode()
+ : -1;
+ }
+
+ public String getSQLState() {
+ return getCause() instanceof SQLException
+ ? ((SQLException) getCause()).getSQLState()
+ : "02000";
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/TypeException.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/TypeException.java
new file mode 100644
index 00000000000000..051cb2c2114202
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/TypeException.java
@@ -0,0 +1,39 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/TypeException.java
+// and modified by Doris
+
+package org.apache.doris.plsql.exception;
+
+import org.apache.doris.plsql.Var.Type;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class TypeException extends PlValidationException {
+ public TypeException(ParserRuleContext ctx, Type expectedType, Type actualType, Object value) {
+ super(ctx, "cannot convert '" + value + "' with type " + actualType + " to " + expectedType);
+ }
+
+ public TypeException(ParserRuleContext ctx, Class<?> expectedType, Type actualType, Object value) {
+ super(ctx, "cannot convert '" + value + "' with type " + actualType + " to " + expectedType);
+ }
+
+ public TypeException(ParserRuleContext ctx, String message) {
+ super(ctx, message);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/UndefinedIdentException.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/UndefinedIdentException.java
new file mode 100644
index 00000000000000..1fb9f1ed271e46
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/exception/UndefinedIdentException.java
@@ -0,0 +1,29 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/UndefinedIdentException.java
+// and modified by Doris
+
+package org.apache.doris.plsql.exception;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class UndefinedIdentException extends PlValidationException {
+ public UndefinedIdentException(ParserRuleContext ctx, String ident) {
+ super(ctx, "identifier '" + ident + "' must be declared.");
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/ColumnMeta.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/ColumnMeta.java
new file mode 100644
index 00000000000000..77fd95e32f18d4
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/ColumnMeta.java
@@ -0,0 +1,57 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/ColumnMeta.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.catalog.Type;
+
+public class ColumnMeta {
+ private final String columnName;
+ private final String typeName;
+ private final int jdbcType;
+ private final Type dorisType;
+
+ public ColumnMeta(String columnName, String typeName, int jdbcType) {
+ this(columnName, typeName, jdbcType, Type.INVALID);
+ }
+
+ public ColumnMeta(String columnName, String typeName, int jdbcType, Type dorisType) {
+ this.columnName = columnName;
+ this.typeName = typeName;
+ this.jdbcType = jdbcType;
+ this.dorisType = dorisType;
+ }
+
+ public String getColumnName() {
+ return columnName;
+ }
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public int getJdbcType() {
+ return jdbcType;
+ }
+
+ public Type getDorisType() {
+ return dorisType;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/DorisRowResult.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/DorisRowResult.java
new file mode 100644
index 00000000000000..f087a7e17ccdce
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/DorisRowResult.java
@@ -0,0 +1,145 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.analysis.LiteralExpr;
+import org.apache.doris.catalog.Type;
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.trees.expressions.literal.Literal;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.qe.Coordinator;
+import org.apache.doris.qe.RowBatch;
+import org.apache.doris.statistics.util.InternalQueryBuffer;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+
+// only used when running from the mysql client
+public class DorisRowResult implements RowResult {
+
+ private Coordinator coord;
+
+ private List<String> columnNames;
+
+ private List<Type> dorisTypes;
+
+ private RowBatch batch;
+
+ private int index;
+
+ private boolean isLazyLoading;
+
+ private boolean eof;
+
+ private Object[] current;
+
+ public DorisRowResult(Coordinator coord, List<String> columnNames, List<Type> dorisTypes) {
+ this.coord = coord;
+ this.columnNames = columnNames;
+ this.dorisTypes = dorisTypes;
+ this.current = columnNames != null ? new Object[columnNames.size()] : null;
+ this.isLazyLoading = false;
+ this.eof = false;
+ }
+
+ @Override
+ public boolean next() {
+ if (eof) {
+ return false;
+ }
+ try {
+ if (batch == null || batch.getBatch() == null
+ || index == batch.getBatch().getRowsSize() - 1) {
+ batch = coord.getNext();
+ index = 0;
+ if (batch.isEos()) {
+ eof = true;
+ return false;
+ }
+ } else {
+ ++index;
+ }
+ isLazyLoading = true;
+ } catch (Exception e) {
+ throw new QueryException(e);
+ }
+ return true;
+ }
+
+ @Override
+ public void close() {
+ // TODO
+ }
+
+ @Override
+ public <T> T get(int columnIndex, Class<T> type) throws AnalysisException {
+ if (isLazyLoading) {
+ readFromDorisType(batch.getBatch().getRows().get(index));
+ isLazyLoading = false;
+ }
+ if (current[columnIndex] == null) {
+ return null;
+ }
+ current[columnIndex] = ((Literal) current[columnIndex]).getValue();
+ if (type.isInstance(current[columnIndex])) {
+ return (T) current[columnIndex];
+ } else {
+ if (current[columnIndex] instanceof Number) {
+ if (type.equals(Long.class)) {
+ return type.cast(((Number) current[columnIndex]).longValue());
+ } else if (type.equals(Integer.class)) {
+ return type.cast(((Number) current[columnIndex]).intValue());
+ } else if (type.equals(Short.class)) {
+ return type.cast(((Number) current[columnIndex]).shortValue());
+ } else if (type.equals(Byte.class)) {
+ return type.cast(((Number) current[columnIndex]).byteValue());
+ }
+ }
+ throw new ClassCastException(current[columnIndex].getClass() + " cannot be casted to " + type);
+ }
+ }
+
+ @Override
+ public Literal get(int columnIndex) throws AnalysisException {
+ if (isLazyLoading) {
+ readFromDorisType(batch.getBatch().getRows().get(index));
+ isLazyLoading = false;
+ }
+ if (current[columnIndex] == null) {
+ return null;
+ }
+ return (Literal) current[columnIndex];
+ }
+
+ @Override
+ public ByteBuffer getMysqlRow() {
+ return batch.getBatch().getRows().get(index);
+ }
+
+ private void readFromDorisType(ByteBuffer buffer) throws AnalysisException {
+ InternalQueryBuffer queryBuffer = new InternalQueryBuffer(buffer.slice());
+ for (int i = 0; i < columnNames.size(); i++) {
+ String value = queryBuffer.readStringWithLength();
+ if (value == null) {
+ current[i] = Literal.of(null);
+ } else {
+ current[i] = Literal.fromLegacyLiteral(LiteralExpr.create(value, dorisTypes.get(i)), dorisTypes.get(i));
+ }
+ }
+ }
+}
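A minimal usage sketch (editorial, not part of the patch) of the row-reading API added above. It assumes a RowResult obtained from a running query inside an FE session; the column positions and types are placeholders. It illustrates the lazy per-row decoding and the Number narrowing performed by get(int, Class):

    // Hypothetical helper; RowResult, AnalysisException come from the files in this patch.
    static void printFirstTwoColumns(RowResult rows) throws AnalysisException {
        while (rows.next()) {
            // Columns are decoded lazily from the current RowBatch on the first get() per row.
            Long id = rows.get(0, Long.class);        // Number values are narrowed via longValue() if needed
            String name = rows.get(1, String.class);  // non-numeric values must already match the requested type
            System.out.println(id + "\t" + name);
        }
        rows.close();
    }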
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/JdbcQueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/JdbcQueryExecutor.java
new file mode 100644
index 00000000000000..07ff17aacf21f4
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/JdbcQueryExecutor.java
@@ -0,0 +1,115 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/JdbcQueryExecutor.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.trees.expressions.literal.Literal;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.Query;
+import org.apache.doris.plsql.exception.QueryException;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.nio.ByteBuffer;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class JdbcQueryExecutor implements QueryExecutor {
+ private final Exec exec;
+
+ public JdbcQueryExecutor(Exec exec) {
+ this.exec = exec;
+ }
+
+ @Override
+ public QueryResult executeQuery(String sql, ParserRuleContext ctx) {
+ String conn = exec.getStatementConnection();
+ Query query = exec.executeQuery(ctx, new Query(sql), conn);
+ ResultSet resultSet = query.getResultSet();
+ if (resultSet == null) { // offline mode
+ return new QueryResult(null, () -> new Metadata(Collections.emptyList()), null, query.getException());
+ } else {
+ return new QueryResult(new JdbcRowResult(resultSet), () -> metadata(resultSet), null, query.getException());
+ }
+ }
+
+ private static Metadata metadata(ResultSet resultSet) {
+ try {
+ ResultSetMetaData meta = resultSet.getMetaData();
+ List<ColumnMeta> colMetas = new ArrayList<>();
+ for (int i = 1; i <= meta.getColumnCount(); i++) {
+ colMetas.add(new ColumnMeta(meta.getColumnName(i), meta.getColumnTypeName(i), meta.getColumnType(i)));
+ }
+ return new Metadata(colMetas);
+ } catch (SQLException e) {
+ throw new QueryException(e);
+ }
+ }
+
+ private static class JdbcRowResult implements org.apache.doris.plsql.executor.RowResult {
+ private final ResultSet resultSet;
+
+ private JdbcRowResult(ResultSet resultSet) {
+ this.resultSet = resultSet;
+ }
+
+ @Override
+ public boolean next() {
+ try {
+ return resultSet.next();
+ } catch (SQLException e) {
+ throw new QueryException(e);
+ }
+ }
+
+ @Override
+ public <T> T get(int columnIndex, Class<T> type) {
+ try {
+ return (T) resultSet.getObject(columnIndex + 1);
+ } catch (SQLException e) {
+ throw new QueryException(e);
+ }
+ }
+
+ @Override
+ public Literal get(int columnIndex) throws AnalysisException {
+ throw new RuntimeException("no support get Doris type result");
+ }
+
+ @Override
+ public ByteBuffer getMysqlRow() {
+ throw new RuntimeException("not implement getMysqlRow method.");
+ }
+
+ @Override
+ public void close() {
+ try {
+ resultSet.close();
+ } catch (SQLException e) {
+ throw new QueryException(e);
+ }
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/Metadata.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/Metadata.java
new file mode 100644
index 00000000000000..089167e9235a6f
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/Metadata.java
@@ -0,0 +1,57 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/Metadata.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.catalog.Type;
+
+import java.util.List;
+
+public class Metadata {
+ private final List<ColumnMeta> columnMetas;
+
+ public Metadata(List<ColumnMeta> columnMetas) {
+ this.columnMetas = columnMetas;
+ }
+
+ public int columnCount() {
+ return columnMetas.size();
+ }
+
+ public int jdbcType(int columnIndex) {
+ return at(columnIndex).getJdbcType();
+ }
+
+ public String columnName(int columnIndex) {
+ return at(columnIndex).getColumnName();
+ }
+
+ public String columnTypeName(int columnIndex) {
+ return at(columnIndex).getTypeName();
+ }
+
+ public Type dorisType(int columnIndex) {
+ return at(columnIndex).getDorisType();
+ }
+
+ private ColumnMeta at(int columnIndex) {
+ return columnMetas.get(columnIndex);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlSqlOperation.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlSqlOperation.java
new file mode 100644
index 00000000000000..525ca612cd3c54
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlSqlOperation.java
@@ -0,0 +1,74 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.common.ErrorCode;
+import org.apache.doris.plsql.Arguments;
+import org.apache.doris.plsql.Conf;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.qe.ConnectContext;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class PlSqlOperation {
+ private static final Logger LOG = LogManager.getLogger(PlSqlOperation.class);
+
+ private final PlsqlResult result;
+
+ private final Exec exec;
+
+ public PlSqlOperation() {
+ result = new PlsqlResult();
+ exec = new Exec(new Conf(), result, new PlsqlQueryExecutor(), result);
+ exec.init();
+ }
+
+ public Exec getExec() {
+ return exec;
+ }
+
+ public void execute(ConnectContext ctx, String statement) {
+ ctx.setRunProcedure(true);
+ ctx.setProcedureExec(exec);
+ result.reset();
+ try {
+ Arguments args = new Arguments();
+ args.parse(new String[] {"-e", statement});
+ exec.parseAndEval(args);
+ // Exceptions are not re-thrown after being caught.
+ // For example, selecting a non-existent table returns empty results; the exception is put into signals.
+ exec.printExceptions();
+ String error = result.getError();
+ String msg = result.getMsg();
+ if (!error.isEmpty()) {
+ ctx.getState().setError("plsql exec error, " + error);
+ } else if (!msg.isEmpty()) {
+ ctx.getState().setOk(0, 0, msg);
+ }
+ ctx.getMysqlChannel().reset();
+ ctx.getState().setOk();
+ ctx.setRunProcedure(false);
+ ctx.setProcedureExec(null);
+ } catch (Exception e) {
+ exec.printExceptions();
+ ctx.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, result.getError() + " " + e.getMessage());
+ LOG.warn(e);
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlsqlQueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlsqlQueryExecutor.java
new file mode 100644
index 00000000000000..7978b4fc5985c4
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlsqlQueryExecutor.java
@@ -0,0 +1,80 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.catalog.MysqlColType;
+import org.apache.doris.catalog.PrimitiveType;
+import org.apache.doris.catalog.Type;
+import org.apache.doris.mysql.MysqlCommand;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.qe.AutoCloseConnectContext;
+import org.apache.doris.qe.ConnectContext;
+import org.apache.doris.qe.ConnectProcessor;
+import org.apache.doris.qe.MysqlConnectProcessor;
+import org.apache.doris.qe.StmtExecutor;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class PlsqlQueryExecutor implements QueryExecutor {
+ public PlsqlQueryExecutor() {
+ }
+
+ @Override
+ public QueryResult executeQuery(String sql, ParserRuleContext ctx) {
+ // A cursor may correspond to a query, and if the user opens multiple cursors, multiple query states
+ // need to be kept. So each query constructs its own ConnectProcessor, while the ConnectContext shares some data.
+ ConnectContext context = ConnectContext.get().cloneContext();
+ try (AutoCloseConnectContext autoCloseCtx = new AutoCloseConnectContext(context)) {
+ autoCloseCtx.call();
+ context.setRunProcedure(true);
+ ConnectProcessor processor = new MysqlConnectProcessor(context);
+ processor.executeQuery(MysqlCommand.COM_QUERY, sql);
+ StmtExecutor executor = context.getExecutor();
+ if (executor.getParsedStmt().getResultExprs() != null) {
+ return new QueryResult(new DorisRowResult(executor.getCoord(), executor.getColumns(),
+ executor.getReturnTypes()), () -> metadata(executor), processor, null);
+ } else {
+ return new QueryResult(new DorisRowResult(executor.getCoord(), executor.getColumns(), null),
+ null, processor, null);
+ }
+ } catch (Exception e) {
+ return new QueryResult(null, () -> new Metadata(Collections.emptyList()), null, e);
+ }
+ }
+
+ private Metadata metadata(StmtExecutor stmtExecutor) {
+ try {
+ List<String> columns = stmtExecutor.getColumns();
+ List<Type> types = stmtExecutor.getReturnTypes();
+ List<ColumnMeta> colMeta = new ArrayList<>();
+ for (int i = 0; i < columns.size(); i++) {
+ PrimitiveType primitiveType = types.get(i).getPrimitiveType();
+ MysqlColType mysqlColType = primitiveType.toMysqlType();
+ colMeta.add(new ColumnMeta(columns.get(i), mysqlColType.getJdbcColumnTypeName(), Integer.MIN_VALUE,
+ types.get(i)));
+ }
+ return new Metadata(colMeta);
+ } catch (Exception e) {
+ throw new QueryException(e);
+ }
+ }
+}
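A hedged sketch (editorial, not part of the patch) of why each executeQuery call builds its own processor: two cursors opened through the same PlsqlQueryExecutor hold independent query state and can be advanced in parallel. The table names and the shared ParserRuleContext are placeholders; QueryExecutor, QueryResult, and AnalysisException come from this patch:

    static void twoCursors(QueryExecutor executor, ParserRuleContext ctx) throws AnalysisException {
        // Each executeQuery call clones the ConnectContext and gets its own MysqlConnectProcessor,
        // so c1 and c2 keep separate Coordinator/RowBatch positions.
        QueryResult c1 = executor.executeQuery("SELECT k FROM t1", ctx);
        QueryResult c2 = executor.executeQuery("SELECT k FROM t2", ctx);
        while (c1.next() && c2.next()) {
            System.out.println(c1.column(0, String.class) + " | " + c2.column(0, String.class));
        }
        c1.close();
        c2.close();
    }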
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlsqlResult.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlsqlResult.java
new file mode 100644
index 00000000000000..c632b930a8adc8
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/PlsqlResult.java
@@ -0,0 +1,208 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.mysql.MysqlEofPacket;
+import org.apache.doris.mysql.MysqlSerializer;
+import org.apache.doris.mysql.MysqlServerStatusFlag;
+import org.apache.doris.plsql.Console;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.qe.AutoCloseConnectContext;
+import org.apache.doris.qe.ConnectContext;
+import org.apache.doris.qe.ConnectProcessor;
+import org.apache.doris.qe.QueryState;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+// If running from the mysql client, first send the schema columns,
+// and then send the ByteBuffer rows through the mysql channel.
+//
+// If running from plsql.sh, send the result directly after serialization.
+public class PlsqlResult implements ResultListener, Console {
+
+ private static final Logger LOG = LogManager.getLogger(PlsqlResult.class);
+ private ConnectProcessor processor;
+ private Metadata metadata = null;
+ private StringBuilder msg;
+ private StringBuilder error;
+ private boolean isSendFields;
+
+ public PlsqlResult() {
+ this.msg = new StringBuilder();
+ this.error = new StringBuilder();
+ this.isSendFields = false;
+ }
+
+ public void reset() {
+ processor = null;
+ metadata = null;
+ isSendFields = false;
+ error.delete(0, error.length());
+ msg.delete(0, msg.length());
+ }
+
+ public void setProcessor(ConnectProcessor processor) {
+ this.processor = processor;
+ }
+
+ public String getMsg() {
+ return msg.toString();
+ }
+
+ public String getError() {
+ return error.toString();
+ }
+
+ @Override
+ public void onMysqlRow(ByteBuffer rows) {
+ ConnectContext ctx = processor != null ? processor.getConnectContext() : ConnectContext.get();
+ sendData(() -> ctx.getMysqlChannel().sendOnePacket(rows));
+ }
+
+ @Override
+ public void onRow(Object[] rows) {
+ ConnectContext ctx = processor != null ? processor.getConnectContext() : ConnectContext.get();
+ sendData(() -> ctx.getMysqlChannel().sendOnePacket(rows));
+ }
+
+ @Override
+ public void onMetadata(Metadata metadata) {
+ this.metadata = metadata;
+ isSendFields = false;
+ }
+
+ @Override
+ public void onEof() {
+ ConnectContext ctx = processor != null ? processor.getConnectContext() : ConnectContext.get();
+ ctx.getState().setEof();
+ try {
+ if (metadata != null && !isSendFields) {
+ sendFields(metadata, ctx.getMysqlChannel().getSerializer());
+ isSendFields = true;
+ }
+ } catch (IOException e) {
+ throw new QueryException(e);
+ }
+ }
+
+ @Override
+ public void onFinalize() {
+ if (metadata == null) {
+ return;
+ }
+ finalizeCommand();
+ metadata = null;
+ }
+
+ private void sendData(Send send) {
+ if (metadata == null) {
+ throw new RuntimeException("The metadata has not been set.");
+ }
+
+ ConnectContext ctx = processor != null ? processor.getConnectContext() : ConnectContext.get();
+ MysqlSerializer serializer = ctx.getMysqlChannel().getSerializer();
+ try {
+ if (!isSendFields) {
+ // For some language drivers, an error packet received after the fields packet
+ // is recognized as a successful result,
+ // so we need to send the fields only after the first batch has arrived.
+ sendFields(metadata, serializer);
+ isSendFields = true;
+ }
+ serializer.reset();
+ send.apply();
+ } catch (IOException e) {
+ LOG.warn("send data fail.", e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ private void sendFields(Metadata metadata, MysqlSerializer serializer) throws IOException {
+ ConnectContext ctx = processor != null ? processor.getConnectContext() : ConnectContext.get();
+ serializer.reset();
+ serializer.writeVInt(metadata.columnCount());
+ ctx.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
+ // send field one by one
+ for (int i = 0; i < metadata.columnCount(); ++i) {
+ serializer.reset();
+ serializer.writeField(metadata.columnName(i), metadata.dorisType(i));
+ ctx.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
+ }
+ // send EOF
+ serializer.reset();
+ MysqlEofPacket eofPacket = new MysqlEofPacket(ctx.getState());
+ eofPacket.writeTo(serializer);
+ ctx.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
+ }
+
+ @Override
+ public void print(String msg) {
+ this.msg.append(msg);
+ }
+
+ @Override
+ public void printLine(String msg) {
+ this.msg.append(msg).append("\n");
+ }
+
+ @Override
+ public void printError(String msg) {
+ this.error.append(msg);
+ }
+
+ @Override
+ public void flushConsole() {
+ ConnectContext ctx = processor != null ? processor.getConnectContext() : ConnectContext.get();
+ boolean needSend = false;
+ if (error.length() > 0) {
+ ctx.getState().setError("hplsql exec error, " + error.toString());
+ needSend = true;
+ } else if (msg.length() > 0) {
+ ctx.getState().setOk(0, 0, msg.toString());
+ needSend = true;
+ }
+ if (needSend) {
+ finalizeCommand();
+ reset();
+ }
+ }
+
+ private void finalizeCommand() {
+ if (processor != null) {
+ try (AutoCloseConnectContext autoCloseCtx = new AutoCloseConnectContext(processor.getConnectContext())) {
+ autoCloseCtx.call();
+ QueryState state = processor.getConnectContext().getState();
+ // https://dev.mysql.com/doc/dev/mysql-server/latest/page_protocol_command_phase_sp.html
+ state.serverStatus |= MysqlServerStatusFlag.SERVER_MORE_RESULTS_EXISTS;
+ processor.finalizeCommand();
+ state.reset();
+ } catch (IOException e) {
+ throw new QueryException(e);
+ }
+ }
+ }
+
+ @FunctionalInterface
+ public interface Send {
+ void apply() throws IOException;
+ }
+}
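A hedged sketch (editorial, not part of the patch) of the callback order PlsqlResult expects when rows are streamed to a MySQL client, inferred from the class above. It assumes a live ConnectContext; the column metadata and the encoded row buffer are placeholders:

    static void streamOneRow(ResultListener listener, ByteBuffer encodedRow) {
        // 1. metadata arrives first; field packets are not sent yet
        listener.onMetadata(new Metadata(java.util.Collections.singletonList(
                new ColumnMeta("id", "BIGINT", java.sql.Types.BIGINT))));
        // 2. the first row triggers the field packets, then the row packet itself
        listener.onMysqlRow(encodedRow);
        // 3. EOF sends the field packets if no row ever arrived
        listener.onEof();
        // 4. finalize flushes the command and sets SERVER_MORE_RESULTS_EXISTS
        listener.onFinalize();
    }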
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/QueryExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/QueryExecutor.java
new file mode 100644
index 00000000000000..f06e2b27bd16e6
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/QueryExecutor.java
@@ -0,0 +1,33 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/QueryExecutor.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.plsql.exception.PlValidationException;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public interface QueryExecutor {
+ QueryResult executeQuery(String sql, ParserRuleContext ctx);
+
+ QueryExecutor DISABLED = (sql, ctx) -> {
+ throw new PlValidationException(ctx, "Query execution is disabled in this context. Can not execute: " + sql);
+ };
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/QueryResult.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/QueryResult.java
new file mode 100644
index 00000000000000..af1d32155f5f0e
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/QueryResult.java
@@ -0,0 +1,122 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/QueryResult.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.trees.expressions.literal.Literal;
+import org.apache.doris.qe.AutoCloseConnectContext;
+import org.apache.doris.qe.ConnectContext;
+import org.apache.doris.qe.ConnectProcessor;
+
+import java.nio.ByteBuffer;
+import java.util.function.Supplier;
+
+public class QueryResult {
+ private final RowResult rows;
+ private final Supplier<Metadata> metadata;
+ private ConnectProcessor processor;
+ private final Exception exception;
+
+ public QueryResult(RowResult rows, Supplier<Metadata> metadata, ConnectProcessor processor, Exception exception) {
+ this.rows = rows;
+ this.metadata = metadata != null ? memoize(metadata) : null;
+ this.processor = processor;
+ this.exception = exception;
+ }
+
+ public boolean next() {
+ ConnectContext preConnectContext;
+ if (processor() != null) {
+ preConnectContext = processor().getConnectContext();
+ try (AutoCloseConnectContext autoCloseCtx = new AutoCloseConnectContext(preConnectContext)) {
+ autoCloseCtx.call();
+ return rows.next();
+ }
+ }
+ return rows.next();
+ }
+
+ public int columnCount() {
+ return metadata != null ? metadata().columnCount() : 0;
+ }
+
+ /**
+ * Get the nth column from the row result.
+ * The index is 0 based unlike in JDBC.
+ */
+ public <T> T column(int columnIndex, Class<T> type) throws AnalysisException {
+ return rows.get(columnIndex, type);
+ }
+
+ public Literal column(int columnIndex) throws AnalysisException {
+ return rows.get(columnIndex);
+ }
+
+ public ByteBuffer mysqlRow() {
+ return rows.getMysqlRow();
+ }
+
+ public boolean error() {
+ return exception != null;
+ }
+
+ public void printStackTrace() {
+ if (exception != null) {
+ exception.printStackTrace();
+ }
+ }
+
+ public ConnectProcessor processor() {
+ return processor;
+ }
+
+ public Exception exception() {
+ return exception;
+ }
+
+ public Metadata metadata() {
+ return metadata != null ? metadata.get() : null;
+ }
+
+ public int jdbcType(int columnIndex) {
+ return metadata != null ? metadata().jdbcType(columnIndex) : 0;
+ }
+
+ public void close() {
+ if (rows != null) {
+ rows.close();
+ }
+ }
+
+ private static <T> Supplier<T> memoize(Supplier<? extends T> supplier) {
+ return com.google.common.base.Suppliers.memoize(supplier::get)::get; // cache the supplier result
+ }
+
+ public String errorText() {
+ if (exception != null) {
+ if (exception instanceof ClassNotFoundException) {
+ return "ClassNotFoundException: " + exception.getMessage();
+ }
+ return exception.getMessage();
+ }
+ return "";
+ }
+}
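A minimal sketch (editorial, not part of the patch) of consuming a QueryResult, showing the 0-based column API noted in the javadoc above and the error-checking path. The executor, SQL text, and parser context are placeholders:

    static void dumpResult(QueryExecutor executor, ParserRuleContext ctx) throws AnalysisException {
        QueryResult result = executor.executeQuery("SELECT id, name FROM t", ctx);
        if (result.error()) {
            System.err.println(result.errorText());
            return;
        }
        while (result.next()) {
            Long id = result.column(0, Long.class);       // 0-based, unlike JDBC
            String name = result.column(1, String.class);
            System.out.println(id + "\t" + name);
        }
        result.close();
    }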
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/ResultListener.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/ResultListener.java
new file mode 100644
index 00000000000000..2ffb8480a5d7ca
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/ResultListener.java
@@ -0,0 +1,65 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/ResultListener.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.qe.ConnectProcessor;
+
+import java.nio.ByteBuffer;
+
+public interface ResultListener {
+ void onMysqlRow(ByteBuffer rows);
+
+ void onRow(Object[] rows);
+
+ void onMetadata(Metadata metadata);
+
+ void onEof();
+
+ void onFinalize();
+
+ void setProcessor(ConnectProcessor processor);
+
+ ResultListener NONE = new ResultListener() {
+ @Override
+ public void onMysqlRow(ByteBuffer rows) {
+ }
+
+ @Override
+ public void onRow(Object[] rows) {
+ }
+
+ @Override
+ public void onMetadata(Metadata metadata) {
+ }
+
+ @Override
+ public void onEof() {
+ }
+
+ @Override
+ public void onFinalize() {
+ }
+
+ @Override
+ public void setProcessor(ConnectProcessor processor) {
+ }
+ };
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/RowResult.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/RowResult.java
new file mode 100644
index 00000000000000..79a24b226d4b16
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/executor/RowResult.java
@@ -0,0 +1,38 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/executor/RowResult.java
+// and modified by Doris
+
+package org.apache.doris.plsql.executor;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.trees.expressions.literal.Literal;
+
+import java.nio.ByteBuffer;
+
+public interface RowResult {
+ boolean next();
+
+ void close();
+
+ <T> T get(int columnIndex, Class<T> type) throws AnalysisException;
+
+ Literal get(int columnIndex) throws AnalysisException;
+
+ ByteBuffer getMysqlRow();
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/BuiltinFunctions.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/BuiltinFunctions.java
new file mode 100644
index 00000000000000..b28a17d3ab832b
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/BuiltinFunctions.java
@@ -0,0 +1,442 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/BuiltinFunctions.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParser.Expr_spec_funcContext;
+import org.apache.doris.nereids.PLParser.Expr_stmtContext;
+import org.apache.doris.plsql.Console;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.Utils;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.plsql.executor.QueryExecutor;
+import org.apache.doris.plsql.executor.QueryResult;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.sql.Date;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class BuiltinFunctions {
+ protected final Exec exec;
+ protected final Console console;
+ protected boolean trace;
+ protected final QueryExecutor queryExecutor;
+ protected HashMap<String, FuncCommand> map = new HashMap<>();
+ protected HashMap<String, FuncSpecCommand> specMap = new HashMap<>();
+ protected HashMap<String, FuncSpecCommand> specSqlMap = new HashMap<>();
+
+ public BuiltinFunctions(Exec exec, QueryExecutor queryExecutor) {
+ this.exec = exec;
+ this.trace = exec.getTrace();
+ this.console = exec.getConsole();
+ this.queryExecutor = queryExecutor;
+ }
+
+ public void register(BuiltinFunctions f) {
+ }
+
+ public boolean exec(String name, Expr_func_paramsContext ctx) {
+ if (name.contains(".")) { // Name can be qualified and spaces are allowed between parts
+ String[] parts = name.split("\\.");
+ StringBuilder str = new StringBuilder();
+ for (int i = 0; i < parts.length; i++) {
+ if (i > 0) {
+ str.append(".");
+ }
+ str.append(parts[i].trim());
+ }
+ name = str.toString();
+ }
+ if (trace && ctx != null && ctx.parent != null && ctx.parent.parent instanceof Expr_stmtContext) {
+ trace(ctx, "FUNC " + name);
+ }
+ org.apache.doris.plsql.functions.FuncCommand func = map.get(name.toUpperCase());
+ if (func != null) {
+ func.run(ctx);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ public boolean exists(String name) {
+ if (name == null) {
+ return false;
+ }
+ name = name.toUpperCase();
+ return map.containsKey(name) || specMap.containsKey(name) || specSqlMap.containsKey(name);
+ }
+
+ /**
+ * Execute a special function
+ */
+ public void specExec(Expr_spec_funcContext ctx) {
+ String name = ctx.start.getText().toUpperCase();
+ if (trace && ctx.parent.parent instanceof Expr_stmtContext) {
+ trace(ctx, "FUNC " + name);
+ }
+ org.apache.doris.plsql.functions.FuncSpecCommand func = specMap.get(name);
+ if (func != null) {
+ func.run(ctx);
+ } else if (ctx.MAX_PART_STRING() != null) {
+ execMaxPartString(ctx);
+ } else if (ctx.MIN_PART_STRING() != null) {
+ execMinPartString(ctx);
+ } else if (ctx.MAX_PART_INT() != null) {
+ execMaxPartInt(ctx);
+ } else if (ctx.MIN_PART_INT() != null) {
+ execMinPartInt(ctx);
+ } else if (ctx.MAX_PART_DATE() != null) {
+ execMaxPartDate(ctx);
+ } else if (ctx.MIN_PART_DATE() != null) {
+ execMinPartDate(ctx);
+ } else if (ctx.PART_LOC() != null) {
+ execPartLoc(ctx);
+ } else {
+ evalNull();
+ }
+ }
+
+ /**
+ * Execute a special function in executable SQL statement
+ */
+ public void specExecSql(Expr_spec_funcContext ctx) {
+ String name = ctx.start.getText().toUpperCase();
+ if (trace && ctx.parent.parent instanceof Expr_stmtContext) {
+ trace(ctx, "FUNC " + name);
+ }
+ org.apache.doris.plsql.functions.FuncSpecCommand func = specSqlMap.get(name);
+ if (func != null) {
+ func.run(ctx);
+ } else {
+ exec.stackPush(Exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * Get the current date
+ */
+ public void execCurrentDate(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "CURRENT_DATE");
+ }
+ SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+ String s = f.format(Calendar.getInstance().getTime());
+ exec.stackPush(new Var(Var.Type.DATE, Utils.toDate(s)));
+ }
+
+ /**
+ * Execute MAX_PART_STRING function
+ */
+ public void execMaxPartString(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "MAX_PART_STRING");
+ }
+ execMinMaxPart(ctx, Var.Type.STRING, true /*max*/);
+ }
+
+ /**
+ * Execute MIN_PART_STRING function
+ */
+ public void execMinPartString(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "MIN_PART_STRING");
+ }
+ execMinMaxPart(ctx, Var.Type.STRING, false /*max*/);
+ }
+
+ /**
+ * Execute MAX_PART_INT function
+ */
+ public void execMaxPartInt(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "MAX_PART_INT");
+ }
+ execMinMaxPart(ctx, Var.Type.BIGINT, true /*max*/);
+ }
+
+ /**
+ * Execute MIN_PART_INT function
+ */
+ public void execMinPartInt(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "MIN_PART_INT");
+ }
+ execMinMaxPart(ctx, Var.Type.BIGINT, false /*max*/);
+ }
+
+ /**
+ * Execute MAX_PART_DATE function
+ */
+ public void execMaxPartDate(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "MAX_PART_DATE");
+ }
+ execMinMaxPart(ctx, Var.Type.DATE, true /*max*/);
+ }
+
+ /**
+ * Execute MIN_PART_DATE function
+ */
+ public void execMinPartDate(Expr_spec_funcContext ctx) {
+ if (trace) {
+ trace(ctx, "MIN_PART_DATE");
+ }
+ execMinMaxPart(ctx, Var.Type.DATE, false /*max*/);
+ }
+
+ /**
+ * Execute MIN or MAX partition function
+ */
+ public void execMinMaxPart(Expr_spec_funcContext ctx, Var.Type type, boolean max) {
+ String tabname = evalPop(ctx.expr(0)).toString();
+ StringBuilder sql = new StringBuilder("SHOW PARTITIONS " + tabname);
+ String colname = null;
+ int colnum = -1;
+ int exprnum = ctx.expr().size();
+ // Column name
+ if (ctx.expr(1) != null) {
+ colname = evalPop(ctx.expr(1)).toString();
+ } else {
+ colnum = 0;
+ }
+ // Partition filter
+ if (exprnum >= 4) {
+ sql.append(" PARTITION (");
+ int i = 2;
+ while (i + 1 < exprnum) {
+ String fcol = evalPop(ctx.expr(i)).toString();
+ String fval = evalPop(ctx.expr(i + 1)).toSqlString();
+ if (i > 2) {
+ sql.append(", ");
+ }
+ sql.append(fcol).append("=").append(fval);
+ i += 2;
+ }
+ sql.append(")");
+ }
+ if (trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ if (exec.getOffline()) {
+ evalNull();
+ return;
+ }
+ QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx);
+ if (query.error()) {
+ evalNullClose(query);
+ return;
+ }
+ try {
+ String resultString = null;
+ Long resultInt = null;
+ Date resultDate = null;
+ while (query.next()) {
+ String[] parts = query.column(0, String.class).split("/");
+ // Find partition column by name
+ if (colnum == -1) {
+ for (int i = 0; i < parts.length; i++) {
+ String[] name = parts[i].split("=");
+ if (name[0].equalsIgnoreCase(colname)) {
+ colnum = i;
+ break;
+ }
+ }
+ // No partition column with the specified name exists
+ if (colnum == -1) {
+ evalNullClose(query);
+ return;
+ }
+ }
+ String[] pair = parts[colnum].split("=");
+ if (type == Var.Type.STRING) {
+ resultString = Utils.minMaxString(resultString, pair[1], max);
+ } else if (type == Var.Type.BIGINT) {
+ resultInt = Utils.minMaxInt(resultInt, pair[1], max);
+ } else if (type == Var.Type.DATE) {
+ resultDate = Utils.minMaxDate(resultDate, pair[1], max);
+ }
+ }
+ if (resultString != null) {
+ evalString(resultString);
+ } else if (resultInt != null) {
+ evalInt(resultInt);
+ } else if (resultDate != null) {
+ evalDate(resultDate);
+ } else {
+ evalNull();
+ }
+ } catch (QueryException | AnalysisException ignored) {
+ // ignored
+ }
+ query.close();
+ }
+
+ /**
+ * Execute PART_LOC function
+ */
+ public void execPartLoc(Expr_spec_funcContext ctx) {
+ String tabname = evalPop(ctx.expr(0)).toString();
+ StringBuilder sql = new StringBuilder("DESCRIBE EXTENDED " + tabname);
+ int exprnum = ctx.expr().size();
+ boolean hostname = false;
+ // Partition filter
+ if (exprnum > 1) {
+ sql.append(" PARTITION (");
+ int i = 1;
+ while (i + 1 < exprnum) {
+ String col = evalPop(ctx.expr(i)).toString();
+ String val = evalPop(ctx.expr(i + 1)).toSqlString();
+ if (i > 2) {
+ sql.append(", ");
+ }
+ sql.append(col).append("=").append(val);
+ i += 2;
+ }
+ sql.append(")");
+ }
+ // With host name
+ if (exprnum % 2 == 0 && evalPop(ctx.expr(exprnum - 1)).intValue() == 1) {
+ hostname = true;
+ }
+ if (trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ if (exec.getOffline()) {
+ evalNull();
+ return;
+ }
+ QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx);
+ if (query.error()) {
+ evalNullClose(query);
+ return;
+ }
+ String result = null;
+ try {
+ while (query.next()) {
+ if (query.column(0, String.class).startsWith("Detailed Partition Information")) {
+ Matcher m = Pattern.compile(".*, location:(.*?),.*").matcher(query.column(1, String.class));
+ if (m.find()) {
+ result = m.group(1);
+ }
+ }
+ }
+ } catch (QueryException | AnalysisException ignored) {
+ // ignored
+ }
+ if (result != null) {
+ // Remove the host name
+ if (!hostname) {
+ Matcher m = Pattern.compile(".*://.*?(/.*)").matcher(result);
+ if (m.find()) {
+ result = m.group(1);
+ }
+ }
+ evalString(result);
+ } else {
+ evalNull();
+ }
+ query.close();
+ }
+
+ public void trace(ParserRuleContext ctx, String message) {
+ if (trace) {
+ exec.trace(ctx, message);
+ }
+ }
+
+ protected void evalNull() {
+ exec.stackPush(Var.Null);
+ }
+
+ protected void evalString(String string) {
+ exec.stackPush(new Var(string));
+ }
+
+ protected Var evalPop(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ return exec.stackPop();
+ }
+
+ protected void evalInt(Long i) {
+ exec.stackPush(new Var(i));
+ }
+
+ protected void evalDate(Date date) {
+ exec.stackPush(new Var(Var.Type.DATE, date));
+ }
+
+ protected void evalNullClose(QueryResult query) {
+ exec.stackPush(Var.Null);
+ query.close();
+ if (trace) {
+ query.printStackTrace();
+ }
+ }
+
+ protected void evalVar(Var var) {
+ exec.stackPush(var);
+ }
+
+ protected void evalString(StringBuilder string) {
+ evalString(string.toString());
+ }
+
+ protected void evalInt(int i) {
+ evalInt(Long.valueOf(i));
+ }
+
+ protected Var evalPop(ParserRuleContext ctx, int value) {
+ if (ctx != null) {
+ return evalPop(ctx);
+ }
+ return new Var(Long.valueOf(value));
+ }
+
+ /**
+ * Get the number of parameters in function call
+ */
+ public static int getParamCount(Expr_func_paramsContext ctx) {
+ if (ctx == null) {
+ return 0;
+ }
+ return ctx.func_param().size();
+ }
+
+ protected void eval(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ }
+
+ protected Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ protected void info(ParserRuleContext ctx, String message) {
+ exec.info(ctx, message);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/DorisFunctionRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/DorisFunctionRegistry.java
new file mode 100644
index 00000000000000..d28ebaaa6448f2
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/DorisFunctionRegistry.java
@@ -0,0 +1,240 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/HmsFunctionRegistry.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLLexer;
+import org.apache.doris.nereids.PLParser;
+import org.apache.doris.nereids.PLParser.Create_function_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_procedure_stmtContext;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParserBaseVisitor;
+import org.apache.doris.nereids.parser.CaseInsensitiveStream;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.Scope;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.metastore.PlsqlMetaClient;
+import org.apache.doris.plsql.metastore.PlsqlStoredProcedure;
+import org.apache.doris.qe.ConnectContext;
+
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+public class DorisFunctionRegistry implements FunctionRegistry {
+ private final Exec exec;
+ private final boolean trace;
+ private final PlsqlMetaClient client;
+ private final BuiltinFunctions builtinFunctions;
+ private final Map<String, ParserRuleContext> cache = new HashMap<>();
+
+ public DorisFunctionRegistry(Exec e, PlsqlMetaClient client, BuiltinFunctions builtinFunctions) {
+ this.exec = e;
+ this.client = client;
+ this.builtinFunctions = builtinFunctions;
+ this.trace = exec.getTrace();
+ }
+
+ @Override
+ public boolean exists(FuncNameInfo procedureName) {
+ return isCached(procedureName.toString()) || getProc(procedureName).isPresent();
+ }
+
+ @Override
+ public void remove(FuncNameInfo procedureName) {
+ try {
+ client.dropPlsqlStoredProcedure(procedureName.getName(), procedureName.getCtl(),
+ procedureName.getDb());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private boolean isCached(String name) {
+ return cache.containsKey(qualified(name));
+ }
+
+ @Override
+ public void removeCached(String name) {
+ cache.remove(qualified(name));
+ }
+
+ private String qualified(String name) {
+ return (ConnectContext.get().getDatabase() + "." + name).toUpperCase();
+ }
+
+
+ @Override
+ public boolean exec(FuncNameInfo procedureName, Expr_func_paramsContext ctx) {
+ if (builtinFunctions.exec(procedureName.toString(), ctx)) { // First look for built-in functions.
+ return true;
+ }
+ if (isCached(procedureName.toString())) {
+ trace(ctx, "EXEC CACHED FUNCTION " + procedureName);
+ execProcOrFunc(ctx, cache.get(qualified(procedureName.toString())), procedureName.toString());
+ return true;
+ }
+ Optional<PlsqlStoredProcedure> proc = getProc(procedureName);
+ if (proc.isPresent()) {
+ trace(ctx, "EXEC HMS FUNCTION " + procedureName);
+ ParserRuleContext procCtx = parse(proc.get());
+ execProcOrFunc(ctx, procCtx, procedureName.toString());
+ saveInCache(procedureName.toString(), procCtx);
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Execute a stored procedure using CALL or EXEC statement passing parameters
+ */
+ private void execProcOrFunc(Expr_func_paramsContext ctx, ParserRuleContext procCtx, String name) {
+ exec.callStackPush(name);
+ HashMap<String, Var> out = new HashMap<>();
+ ArrayList<Var> actualParams = getActualCallParameters(ctx);
+ exec.enterScope(Scope.Type.ROUTINE);
+ callWithParameters(ctx, procCtx, out, actualParams);
+ exec.callStackPop();
+ exec.leaveScope();
+ for (Map.Entry<String, Var> i : out.entrySet()) { // Set OUT parameters
+ exec.setVariable(i.getKey(), i.getValue());
+ }
+ }
+
+ private void callWithParameters(Expr_func_paramsContext ctx, ParserRuleContext procCtx,
+ HashMap<String, Var> out, ArrayList<Var> actualParams) {
+ if (procCtx instanceof Create_function_stmtContext) {
+ Create_function_stmtContext func = (Create_function_stmtContext) procCtx;
+ InMemoryFunctionRegistry.setCallParameters(func.multipartIdentifier().getText(), ctx, actualParams,
+ func.create_routine_params(), null, exec);
+ if (func.declare_block_inplace() != null) {
+ exec.visit(func.declare_block_inplace());
+ }
+ exec.visit(func.single_block_stmt());
+ } else {
+ Create_procedure_stmtContext proc = (Create_procedure_stmtContext) procCtx;
+ InMemoryFunctionRegistry.setCallParameters(proc.multipartIdentifier().getText(), ctx, actualParams,
+ proc.create_routine_params(), out, exec);
+ exec.visit(proc.procedure_block());
+ }
+ }
+
+ private ParserRuleContext parse(PlsqlStoredProcedure proc) {
+ PLLexer lexer = new PLLexer(new CaseInsensitiveStream(CharStreams.fromString(proc.getSource())));
+ CommonTokenStream tokens = new CommonTokenStream(lexer);
+ PLParser parser = new PLParser(tokens);
+ ProcedureVisitor visitor = new ProcedureVisitor();
+ parser.program().accept(visitor);
+ return visitor.func != null ? visitor.func : visitor.proc;
+ }
+
+ private Optional<PlsqlStoredProcedure> getProc(FuncNameInfo procedureName) {
+ return Optional.ofNullable(
+ client.getPlsqlStoredProcedure(procedureName.getName(), procedureName.getCtl(),
+ procedureName.getDb()));
+ }
+
+ private ArrayList<Var> getActualCallParameters(Expr_func_paramsContext actual) {
+ if (actual == null || actual.func_param() == null) {
+ return null;
+ }
+ int cnt = actual.func_param().size();
+ ArrayList<Var> values = new ArrayList<>(cnt);
+ for (int i = 0; i < cnt; i++) {
+ values.add(evalPop(actual.func_param(i).expr()));
+ }
+ return values;
+ }
+
+ @Override
+ public void addUserFunction(Create_function_stmtContext ctx) {
+ FuncNameInfo procedureName = new FuncNameInfo(
+ exec.logicalPlanBuilder.visitMultipartIdentifier(ctx.multipartIdentifier()));
+ if (builtinFunctions.exists(procedureName.toString())) {
+ exec.info(ctx, procedureName.toString() + " is a built-in function which cannot be redefined.");
+ return;
+ }
+ trace(ctx, "CREATE FUNCTION " + procedureName.toString());
+ saveInCache(procedureName.toString(), ctx);
+ saveStoredProc(procedureName, Exec.getFormattedText(ctx), ctx.REPLACE() != null);
+ }
+
+ @Override
+ public void addUserProcedure(Create_procedure_stmtContext ctx) {
+ FuncNameInfo procedureName = new FuncNameInfo(
+ exec.logicalPlanBuilder.visitMultipartIdentifier(ctx.multipartIdentifier()));
+ if (builtinFunctions.exists(procedureName.toString())) {
+ exec.info(ctx, procedureName.toString() + " is a built-in function which cannot be redefined.");
+ return;
+ }
+ trace(ctx, "CREATE PROCEDURE " + procedureName.toString());
+ saveInCache(procedureName.toString(), ctx);
+ saveStoredProc(procedureName, Exec.getFormattedText(ctx), ctx.REPLACE() != null);
+ }
+
+ private void saveStoredProc(FuncNameInfo procedureName, String source, boolean isForce) {
+ client.addPlsqlStoredProcedure(procedureName.getName(), procedureName.getCtl(),
+ procedureName.getDb(),
+ ConnectContext.get().getQualifiedUser(), source, isForce);
+ }
+
+ private void saveInCache(String name, ParserRuleContext procCtx) {
+ // TODO, removeCached needs to be synchronized to all Observer FEs.
+ // Even if it is always executed on the Master FE, it still has to deal with Master switching.
+ // cache.put(qualified(name.toUpperCase()), procCtx);
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ private Var evalPop(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ return exec.stackPop();
+ }
+
+ private void trace(ParserRuleContext ctx, String message) {
+ if (trace) {
+ exec.trace(ctx, message);
+ }
+ }
+
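+ /**
+ * Collects the first CREATE FUNCTION or CREATE PROCEDURE context found while visiting a parsed program.
+ */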
+ private static class ProcedureVisitor extends PLParserBaseVisitor<Void> {
+ Create_function_stmtContext func;
+ Create_procedure_stmtContext proc;
+
+ @Override
+ public Void visitCreate_procedure_stmt(Create_procedure_stmtContext ctx) {
+ proc = ctx;
+ return null;
+ }
+
+ @Override
+ public Void visitCreate_function_stmt(Create_function_stmtContext ctx) {
+ func = ctx;
+ return null;
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FuncCommand.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FuncCommand.java
new file mode 100644
index 00000000000000..d58ded0b8383ef
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FuncCommand.java
@@ -0,0 +1,27 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/InMemoryFunctionRegistry.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+
+interface FuncCommand {
+ void run(Expr_func_paramsContext ctx);
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FuncSpecCommand.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FuncSpecCommand.java
new file mode 100644
index 00000000000000..15539fe562a0df
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FuncSpecCommand.java
@@ -0,0 +1,27 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/InMemoryFunctionRegistry.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLParser.Expr_spec_funcContext;
+
+interface FuncSpecCommand {
+ void run(Expr_spec_funcContext ctx);
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionDatetime.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionDatetime.java
new file mode 100644
index 00000000000000..ee1caabd56c977
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionDatetime.java
@@ -0,0 +1,203 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionDatetime.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParser.Expr_spec_funcContext;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.executor.QueryExecutor;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.TimeZone;
+
+public class FunctionDatetime extends BuiltinFunctions {
+ public FunctionDatetime(Exec e, QueryExecutor queryExecutor) {
+ super(e, queryExecutor);
+ }
+
+ /**
+ * Register functions
+ */
+ @Override
+ public void register(BuiltinFunctions f) {
+ f.map.put("DATE", this::date);
+ f.map.put("FROM_UNIXTIME", this::fromUnixtime);
+ f.map.put("NOW", ctx -> now(ctx));
+ f.map.put("TIMESTAMP_ISO", this::timestampIso);
+ f.map.put("TO_TIMESTAMP", this::toTimestamp);
+ f.map.put("UNIX_TIMESTAMP", this::unixTimestamp);
+ f.map.put("CURRENT_TIME_MILLIS", this::currentTimeMillis);
+
+ f.specMap.put("CURRENT_DATE", this::currentDate);
+ f.specMap.put("CURRENT_TIMESTAMP", this::currentTimestamp);
+ f.specMap.put("SYSDATE", this::currentTimestamp);
+
+ f.specSqlMap.put("CURRENT_DATE",
+ (org.apache.doris.plsql.functions.FuncSpecCommand) this::currentDateSql);
+ f.specSqlMap.put("CURRENT_TIMESTAMP",
+ (org.apache.doris.plsql.functions.FuncSpecCommand) this::currentTimestampSql);
+ }
+
+ /**
+ * CURRENT_DATE
+ */
+ public void currentDate(Expr_spec_funcContext ctx) {
+ evalVar(currentDate());
+ }
+
+ public static Var currentDate() {
+ SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
+ String s = f.format(Calendar.getInstance().getTime());
+ return new Var(org.apache.doris.plsql.Var.Type.DATE,
+ org.apache.doris.plsql.Utils.toDate(s));
+ }
+
+ /**
+ * CURRENT_DATE in executable SQL statement
+ */
+ public void currentDateSql(Expr_spec_funcContext ctx) {
+ if (exec.getConnectionType() == org.apache.doris.plsql.Conn.Type.HIVE) {
+ evalString("TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP()))");
+ } else {
+ evalString(exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * CURRENT_TIMESTAMP
+ */
+ public void currentTimestamp(Expr_spec_funcContext ctx) {
+ int precision = evalPop(ctx.expr(0), 3).intValue();
+ evalVar(currentTimestamp(precision));
+ }
+
+ public static Var currentTimestamp(int precision) {
+ String format = "yyyy-MM-dd HH:mm:ss";
+ if (precision > 0 && precision <= 3) {
+ format += "." + StringUtils.repeat("S", precision);
+ }
+ SimpleDateFormat f = new SimpleDateFormat(format);
+ String s = f.format(Calendar.getInstance(TimeZone.getDefault()).getTime());
+ return new Var(org.apache.doris.plsql.Utils.toTimestamp(s), precision);
+ }
+
+ /**
+ * CURRENT_TIMESTAMP in executable SQL statement
+ */
+ public void currentTimestampSql(Expr_spec_funcContext ctx) {
+ if (exec.getConnectionType() == org.apache.doris.plsql.Conn.Type.HIVE) {
+ evalString("FROM_UNIXTIME(UNIX_TIMESTAMP())");
+ } else {
+ evalString(org.apache.doris.plsql.Exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * DATE function
+ */
+ void date(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 1) {
+ evalNull();
+ return;
+ }
+ Var var = new Var(org.apache.doris.plsql.Var.Type.DATE);
+ var.cast(evalPop(ctx.func_param(0).expr()));
+ evalVar(var);
+ }
+
+ /**
+ * NOW() function (current date and time)
+ */
+ void now(Expr_func_paramsContext ctx) {
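+ // NOW() does not accept arguments; return NULL if any were supplied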
+ if (ctx != null) {
+ evalNull();
+ return;
+ }
+ evalVar(currentTimestamp(3));
+ }
+
+ /**
+ * TIMESTAMP_ISO function
+ */
+ void timestampIso(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 1) {
+ evalNull();
+ return;
+ }
+ Var var = new Var(org.apache.doris.plsql.Var.Type.TIMESTAMP);
+ var.cast(evalPop(ctx.func_param(0).expr()));
+ evalVar(var);
+ }
+
+ /**
+ * TO_TIMESTAMP function
+ */
+ void toTimestamp(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 2) {
+ evalNull();
+ return;
+ }
+ String value = evalPop(ctx.func_param(0).expr()).toString();
+ String sqlFormat = evalPop(ctx.func_param(1).expr()).toString();
+ String format = org.apache.doris.plsql.Utils.convertSqlDatetimeFormat(sqlFormat);
+ try {
+ long timeInMs = new SimpleDateFormat(format).parse(value).getTime();
+ evalVar(new Var(org.apache.doris.plsql.Var.Type.TIMESTAMP, new Timestamp(timeInMs)));
+ } catch (Exception e) {
+ exec.signal(e);
+ evalNull();
+ }
+ }
+
+ /**
+ * FROM_UNIXTIME() function (convert seconds since 1970-01-01 00:00:00 to timestamp)
+ */
+ void fromUnixtime(Expr_func_paramsContext ctx) {
+ int cnt = getParamCount(ctx);
+ if (cnt == 0) {
+ evalNull();
+ return;
+ }
+ long epoch = evalPop(ctx.func_param(0).expr()).longValue();
+ String format = "yyyy-MM-dd HH:mm:ss";
+ if (cnt > 1) {
+ format = evalPop(ctx.func_param(1).expr()).toString();
+ }
+ evalString(new SimpleDateFormat(format).format(new Date(epoch * 1000)));
+ }
+
+ /**
+ * UNIX_TIMESTAMP() function (current date and time in seconds since 1970-01-01 00:00:00)
+ */
+ void unixTimestamp(Expr_func_paramsContext ctx) {
+ evalVar(new Var(System.currentTimeMillis() / 1000));
+ }
+
+ public void currentTimeMillis(Expr_func_paramsContext ctx) {
+ evalVar(new Var(System.currentTimeMillis()));
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionMisc.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionMisc.java
new file mode 100644
index 00000000000000..5d2a74d3841b58
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionMisc.java
@@ -0,0 +1,315 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParser.Expr_spec_funcContext;
+import org.apache.doris.plsql.Conn;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.exception.QueryException;
+import org.apache.doris.plsql.executor.QueryExecutor;
+import org.apache.doris.plsql.executor.QueryResult;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+public class FunctionMisc extends BuiltinFunctions {
+ public FunctionMisc(Exec e, QueryExecutor queryExecutor) {
+ super(e, queryExecutor);
+ }
+
+ /**
+ * Register functions
+ */
+ @Override
+ public void register(BuiltinFunctions f) {
+ f.map.put("COALESCE", this::nvl);
+ f.map.put("DECODE", this::decode);
+ f.map.put("NVL", this::nvl);
+ f.map.put("NVL2", this::nvl2);
+ f.map.put("PART_COUNT_BY", this::partCountBy);
+ f.map.put("MOD", this::modulo);
+
+ f.specMap.put("ACTIVITY_COUNT", this::activityCount);
+ f.specMap.put("CAST", this::cast);
+ f.specMap.put("CURRENT", this::current);
+ f.specMap.put("CURRENT_USER", this::currentUser);
+ f.specMap.put("PART_COUNT", this::partCount);
+ f.specMap.put("USER", this::currentUser);
+
+ f.specSqlMap.put("CURRENT", this::currentSql);
+ }
+
+ /**
+ * ACTIVITY_COUNT function (built-in variable)
+ */
+ void activityCount(Expr_spec_funcContext ctx) {
+ evalInt(Long.valueOf(exec.getRowCount()));
+ }
+
+ /**
+ * CAST function
+ */
+ void cast(Expr_spec_funcContext ctx) {
+ if (ctx.expr().size() != 1) {
+ evalNull();
+ return;
+ }
+ String type = ctx.dtype().getText();
+ String len = null;
+ String scale = null;
+ if (ctx.dtype_len() != null) {
+ len = ctx.dtype_len().INTEGER_VALUE(0).getText();
+ if (ctx.dtype_len().INTEGER_VALUE(1) != null) {
+ scale = ctx.dtype_len().INTEGER_VALUE(1).getText();
+ }
+ }
+ Var var = new Var(null, type, len, scale, null);
+ var.cast(evalPop(ctx.expr(0)));
+ evalVar(var);
+ }
+
+ /**
+ * CURRENT function
+ */
+ void current(Expr_spec_funcContext ctx) {
+ if (ctx.DATE() != null) {
+ evalVar(FunctionDatetime.currentDate());
+ } else if (ctx.TIMESTAMP() != null) {
+ int precision = evalPop(ctx.expr(0), 3).intValue();
+ evalVar(FunctionDatetime.currentTimestamp(precision));
+ } else if (ctx.USER() != null) {
+ evalVar(FunctionMisc.currentUser());
+ } else {
+ evalNull();
+ }
+ }
+
+ /**
+ * CURRENT function in executable SQL statement
+ */
+ void currentSql(Expr_spec_funcContext ctx) {
+ if (ctx.DATE() != null) {
+ if (exec.getConnectionType() == Conn.Type.HIVE) {
+ evalString("TO_DATE(FROM_UNIXTIME(UNIX_TIMESTAMP()))");
+ } else {
+ evalString("CURRENT_DATE");
+ }
+ } else if (ctx.TIMESTAMP() != null) {
+ if (exec.getConnectionType() == Conn.Type.HIVE) {
+ evalString("FROM_UNIXTIME(UNIX_TIMESTAMP())");
+ } else {
+ evalString("CURRENT_TIMESTAMP");
+ }
+ } else {
+ evalString(exec.getFormattedText(ctx));
+ }
+ }
+
+ /**
+ * CURRENT_USER function
+ */
+ void currentUser(Expr_spec_funcContext ctx) {
+ evalVar(currentUser());
+ }
+
+ public static Var currentUser() {
+ return new Var(System.getProperty("user.name"));
+ }
+
+ /**
+ * DECODE function
+ */
+ void decode(Expr_func_paramsContext ctx) {
+ int cnt = ctx.func_param().size();
+ if (cnt < 3) {
+ evalNull();
+ return;
+ }
+ Var value = evalPop(ctx.func_param(0).expr());
+ int i = 1;
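+ // Walk the remaining parameters as (search, result) pairs; a trailing unpaired parameter acts as the ELSE value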
+ while (i + 1 < cnt) {
+ Var when = evalPop(ctx.func_param(i).expr());
+ if ((value.isNull() && when.isNull()) || value.equals(when)) {
+ eval(ctx.func_param(i + 1).expr());
+ return;
+ }
+ i += 2;
+ }
+ if (i < cnt) { // ELSE expression
+ eval(ctx.func_param(i).expr());
+ } else {
+ evalNull();
+ }
+ }
+
+ /**
+ * NVL function - Return first non-NULL expression
+ */
+ void nvl(Expr_func_paramsContext ctx) {
+ for (int i = 0; i < ctx.func_param().size(); i++) {
+ Var v = evalPop(ctx.func_param(i).expr());
+ if (v.type != Var.Type.NULL) {
+ exec.stackPush(v);
+ return;
+ }
+ }
+ evalNull();
+ }
+
+ /**
+ * NVL2 function - If expr1 is not NULL return expr2, otherwise expr3
+ */
+ void nvl2(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() == 3) {
+ if (!evalPop(ctx.func_param(0).expr()).isNull()) {
+ eval(ctx.func_param(1).expr());
+ } else {
+ eval(ctx.func_param(2).expr());
+ }
+ } else {
+ evalNull();
+ }
+ }
+
+ /**
+ * PART_COUNT function
+ */
+ public void partCount(Expr_spec_funcContext ctx) {
+ String tabname = evalPop(ctx.expr(0)).toString();
+ StringBuilder sql = new StringBuilder();
+ sql.append("SHOW PARTITIONS ");
+ sql.append(tabname);
+ int cnt = ctx.expr().size();
+ if (cnt > 1) {
+ sql.append(" PARTITION (");
+ int i = 1;
+ while (i + 1 < cnt) {
+ String col = evalPop(ctx.expr(i)).toString();
+ String val = evalPop(ctx.expr(i + 1)).toSqlString();
+ if (i > 2) {
+ sql.append(", ");
+ }
+ sql.append(col);
+ sql.append("=");
+ sql.append(val);
+ i += 2;
+ }
+ sql.append(")");
+ }
+ if (trace) {
+ trace(ctx, "Query: " + sql);
+ }
+ if (exec.getOffline()) {
+ evalNull();
+ return;
+ }
+ QueryResult query = queryExecutor.executeQuery(sql.toString(), ctx);
+ if (query.error()) {
+ evalNullClose(query);
+ return;
+ }
+ int result = 0;
+ try {
+ while (query.next()) {
+ result++;
+ }
+ } catch (Exception e) {
+ evalNullClose(query);
+ return;
+ }
+ evalInt(result);
+ query.close();
+ }
+
+ public void modulo(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() == 2) {
+ int a = evalPop(ctx.func_param(0).expr()).intValue();
+ int b = evalPop(ctx.func_param(1).expr()).intValue();
+ evalInt(a % b);
+ } else {
+ evalNull();
+ }
+ }
+
+ /**
+ * PART_COUNT_BY function
+ */
+ public void partCountBy(Expr_func_paramsContext ctx) {
+ int cnt = ctx.func_param().size();
+ if (cnt < 1 || exec.getOffline()) {
+ return;
+ }
+ String tabname = evalPop(ctx.func_param(0).expr()).toString();
+ ArrayList<String> keys = null;
+ if (cnt > 1) {
+ keys = new ArrayList<>();
+ for (int i = 1; i < cnt; i++) {
+ keys.add(evalPop(ctx.func_param(i).expr()).toString().toUpperCase());
+ }
+ }
+ String sql = "SHOW PARTITIONS " + tabname;
+ QueryResult query = queryExecutor.executeQuery(sql, ctx);
+ if (query.error()) {
+ query.close();
+ return;
+ }
+ Map<String, Integer> group = new HashMap<>();
+ try {
+ while (query.next()) {
+ String part = query.column(0, String.class);
+ String[] parts = part.split("/");
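+ // Partition names are expected in "col1=val1/col2=val2" form; keep only the requested key columns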
+ String key = parts[0];
+ if (cnt > 1) {
+ StringBuilder k = new StringBuilder();
+ for (int i = 0; i < parts.length; i++) {
+ if (keys.contains(parts[i].split("=")[0].toUpperCase())) {
+ if (k.length() > 0) {
+ k.append("/");
+ }
+ k.append(parts[i]);
+ }
+ }
+ key = k.toString();
+ }
+ Integer count = group.get(key);
+ if (count == null) {
+ count = Integer.valueOf(0);
+ }
+ group.put(key, count + 1);
+ }
+ } catch (QueryException | AnalysisException e) {
+ query.close();
+ return;
+ }
+ if (cnt == 1) {
+ evalInt(group.size());
+ } else {
+ for (Map.Entry<String, Integer> i : group.entrySet()) {
+ console.printLine(i.getKey() + '\t' + i.getValue());
+ }
+ }
+ query.close();
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionRegistry.java
new file mode 100644
index 00000000000000..e45a5eff3e470b
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionRegistry.java
@@ -0,0 +1,40 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionRegistry.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLParser.Create_function_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_procedure_stmtContext;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
+
+public interface FunctionRegistry {
+ boolean exec(FuncNameInfo procedureName, Expr_func_paramsContext ctx);
+
+ void addUserFunction(Create_function_stmtContext ctx);
+
+ void addUserProcedure(Create_procedure_stmtContext ctx);
+
+ boolean exists(FuncNameInfo procedureName);
+
+ void remove(FuncNameInfo procedureName);
+
+ void removeCached(String name);
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionString.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionString.java
new file mode 100644
index 00000000000000..58aa59a57088cf
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/FunctionString.java
@@ -0,0 +1,290 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionString.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.PLParser.Expr_spec_funcContext;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.executor.QueryExecutor;
+
+public class FunctionString extends BuiltinFunctions {
+ public FunctionString(Exec e, QueryExecutor queryExecutor) {
+ super(e, queryExecutor);
+ }
+
+ /**
+ * Register functions
+ */
+ @Override
+ public void register(BuiltinFunctions f) {
+ f.map.put("CONCAT", this::concat);
+ f.map.put("CHAR", this::char_);
+ f.map.put("INSTR", this::instr);
+ f.map.put("LEN", this::len);
+ f.map.put("LENGTH", this::length);
+ f.map.put("LOWER", this::lower);
+ f.map.put("REPLACE", this::replace);
+ f.map.put("SUBSTR", this::substr);
+ f.map.put("SUBSTRING", this::substr);
+ f.map.put("TO_CHAR", this::toChar);
+ f.map.put("UPPER", this::upper);
+
+ f.specMap.put("SUBSTRING", this::substring);
+ f.specMap.put("TRIM", this::trim);
+ }
+
+ /**
+ * CONCAT function
+ */
+ void concat(Expr_func_paramsContext ctx) {
+ StringBuilder val = new StringBuilder();
+ int cnt = getParamCount(ctx);
+ boolean nulls = true;
+ for (int i = 0; i < cnt; i++) {
+ org.apache.doris.plsql.Var c = evalPop(ctx.func_param(i).expr());
+ if (!c.isNull()) {
+ val.append(c.toString());
+ nulls = false;
+ }
+ }
+ if (nulls) {
+ evalNull();
+ } else {
+ evalString(val);
+ }
+ }
+
+ /**
+ * CHAR function
+ */
+ void char_(Expr_func_paramsContext ctx) {
+ int cnt = getParamCount(ctx);
+ if (cnt != 1) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString();
+ evalString(str);
+ }
+
+ /**
+ * INSTR function
+ */
+ void instr(Expr_func_paramsContext ctx) {
+ int cnt = getParamCount(ctx);
+ if (cnt < 2) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString();
+ if (str == null) {
+ evalNull();
+ return;
+ } else if (str.isEmpty()) {
+ evalInt(0);
+ return;
+ }
+ String substr = evalPop(ctx.func_param(1).expr()).toString();
+ int pos = 1;
+ int occur = 1;
+ int idx = 0;
+ if (cnt >= 3) {
+ pos = evalPop(ctx.func_param(2).expr()).intValue();
+ if (pos == 0) {
+ pos = 1;
+ }
+ }
+ if (cnt >= 4) {
+ occur = evalPop(ctx.func_param(3).expr()).intValue();
+ if (occur < 0) {
+ occur = 1;
+ }
+ }
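+ // Scan forward when the position is positive, backward from the end when it is negative,
+ // repeating until the requested occurrence is found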
+ for (int i = occur; i > 0; i--) {
+ if (pos > 0) {
+ idx = str.indexOf(substr, pos - 1);
+ } else {
+ str = str.substring(0, str.length() - pos * (-1));
+ idx = str.lastIndexOf(substr);
+ }
+ if (idx == -1) {
+ idx = 0;
+ break;
+ } else {
+ idx++;
+ }
+ if (i > 1) {
+ if (pos > 0) {
+ pos = idx + 1;
+ } else {
+ pos = (str.length() - idx + 1) * (-1);
+ }
+ }
+ }
+ evalInt(idx);
+ }
+
+ /**
+ * LEN function (excluding trailing spaces)
+ */
+ void len(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 1) {
+ evalNull();
+ return;
+ }
+ int len = evalPop(ctx.func_param(0).expr()).toString().trim().length();
+ evalInt(len);
+ }
+
+ /**
+ * LENGTH function
+ */
+ void length(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 1) {
+ evalNull();
+ return;
+ }
+ int len = evalPop(ctx.func_param(0).expr()).toString().length();
+ evalInt(len);
+ }
+
+ /**
+ * LOWER function
+ */
+ void lower(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 1) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString().toLowerCase();
+ evalString(str);
+ }
+
+ /**
+ * REPLACE function
+ */
+ void replace(Expr_func_paramsContext ctx) {
+ int cnt = getParamCount(ctx);
+ if (cnt < 3) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString();
+ String what = evalPop(ctx.func_param(1).expr()).toString();
+ String with = evalPop(ctx.func_param(2).expr()).toString();
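+ // Note: replaceAll treats 'what' as a Java regular expression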
+ evalString(str.replaceAll(what, with));
+ }
+
+ /**
+ * SUBSTR and SUBSTRING function
+ */
+ void substr(Expr_func_paramsContext ctx) {
+ int cnt = getParamCount(ctx);
+ if (cnt < 2) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString();
+ int start = evalPop(ctx.func_param(1).expr()).intValue();
+ int len = -1;
+ if (start == 0) {
+ start = 1;
+ }
+ if (cnt > 2) {
+ len = evalPop(ctx.func_param(2).expr()).intValue();
+ }
+ substr(str, start, len);
+ }
+
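+ /**
+ * Substring helper with a 1-based start position; a length of -1 means to the end of the string
+ */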
+ void substr(String str, int start, int len) {
+ if (str == null) {
+ evalNull();
+ return;
+ } else if (str.isEmpty()) {
+ evalString(str);
+ return;
+ }
+ if (start == 0) {
+ start = 1;
+ }
+ if (len == -1) {
+ if (start > 0) {
+ evalString(str.substring(start - 1));
+ }
+ } else {
+ evalString(str.substring(start - 1, start - 1 + len));
+ }
+ }
+
+ /**
+ * SUBSTRING FROM FOR function
+ */
+ void substring(Expr_spec_funcContext ctx) {
+ String str = evalPop(ctx.expr(0)).toString();
+ int start = evalPop(ctx.expr(1)).intValue();
+ int len = -1;
+ if (start == 0) {
+ start = 1;
+ }
+ if (ctx.FOR() != null) {
+ len = evalPop(ctx.expr(2)).intValue();
+ }
+ substr(str, start, len);
+ }
+
+ /**
+ * TRIM function
+ */
+ void trim(Expr_spec_funcContext ctx) {
+ int cnt = ctx.expr().size();
+ if (cnt != 1) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.expr(0)).toString();
+ evalString(str.trim());
+ }
+
+ /**
+ * TO_CHAR function
+ */
+ void toChar(Expr_func_paramsContext ctx) {
+ int cnt = getParamCount(ctx);
+ if (cnt != 1) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString();
+ evalString(str);
+ }
+
+ /**
+ * UPPER function
+ */
+ void upper(Expr_func_paramsContext ctx) {
+ if (ctx.func_param().size() != 1) {
+ evalNull();
+ return;
+ }
+ String str = evalPop(ctx.func_param(0).expr()).toString().toUpperCase();
+ evalString(str);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/InMemoryFunctionRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/InMemoryFunctionRegistry.java
new file mode 100644
index 00000000000000..ab39f617271616
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/functions/InMemoryFunctionRegistry.java
@@ -0,0 +1,274 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/functions/InMemoryFunctionRegistry.java
+// and modified by Doris
+
+package org.apache.doris.plsql.functions;
+
+import org.apache.doris.nereids.PLParser.Create_function_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_procedure_stmtContext;
+import org.apache.doris.nereids.PLParser.Create_routine_param_itemContext;
+import org.apache.doris.nereids.PLParser.Create_routine_paramsContext;
+import org.apache.doris.nereids.PLParser.ExprContext;
+import org.apache.doris.nereids.PLParser.Expr_func_paramsContext;
+import org.apache.doris.nereids.trees.plans.commands.info.FuncNameInfo;
+import org.apache.doris.plsql.Exec;
+import org.apache.doris.plsql.Scope;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.exception.ArityException;
+import org.apache.doris.plsql.objects.TableClass;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * PL/SQL functions
+ */
+public class InMemoryFunctionRegistry implements FunctionRegistry {
+ Exec exec;
+ private BuiltinFunctions builtinFunctions;
+ HashMap<String, Create_function_stmtContext> funcMap = new HashMap<>();
+ HashMap<String, Create_procedure_stmtContext> procMap = new HashMap<>();
+ boolean trace = false;
+
+ public InMemoryFunctionRegistry(Exec e, BuiltinFunctions builtinFunctions) {
+ this.exec = e;
+ this.trace = exec.getTrace();
+ this.builtinFunctions = builtinFunctions;
+ }
+
+ @Override
+ public boolean exists(FuncNameInfo procedureName) {
+ return funcMap.containsKey(procedureName.toString()) || procMap.containsKey(procedureName.toString());
+ }
+
+ @Override
+ public void remove(FuncNameInfo procedureName) {
+ funcMap.remove(procedureName.toString());
+ procMap.remove(procedureName.toString());
+ }
+
+ @Override
+ public boolean exec(FuncNameInfo procedureName, Expr_func_paramsContext ctx) {
+ if (builtinFunctions.exec(procedureName.toString(), ctx)) {
+ return true;
+ }
+ if (execFunction(procedureName.toString(), ctx)) {
+ return true;
+ }
+ return (procMap.get(procedureName.toString()) != null && execProc(procedureName.toString(), ctx));
+ }
+
+ @Override
+ public void removeCached(String name) {
+ throw new RuntimeException("no support removeCached");
+ }
+
+ /**
+ * Execute a user-defined function
+ */
+ private boolean execFunction(String name, Expr_func_paramsContext ctx) {
+ Create_function_stmtContext userCtx = funcMap.get(name);
+ if (userCtx == null) {
+ return false;
+ }
+ if (trace) {
+ trace(ctx, "EXEC FUNCTION " + name);
+ }
+ ArrayList<Var> actualParams = getActualCallParameters(ctx);
+ exec.enterScope(Scope.Type.ROUTINE);
+ setCallParameters(name, ctx, actualParams, userCtx.create_routine_params(), null, exec);
+ if (userCtx.declare_block_inplace() != null) {
+ visit(userCtx.declare_block_inplace());
+ }
+ visit(userCtx.single_block_stmt());
+ exec.leaveScope();
+ return true;
+ }
+
+ /**
+ * Execute a stored procedure using CALL or EXEC statement passing parameters
+ */
+ private boolean execProc(String name, Expr_func_paramsContext ctx) {
+ if (trace) {
+ trace(ctx == null ? null : ctx.getParent(), "EXEC PROCEDURE " + name);
+ }
+ Create_procedure_stmtContext procCtx = procMap.get(name);
+ if (procCtx == null) {
+ trace(ctx.getParent(), "Procedure not found");
+ return false;
+ }
+ ArrayList<Var> actualParams = getActualCallParameters(ctx);
+ HashMap<String, Var> out = new HashMap<>();
+ exec.enterScope(Scope.Type.ROUTINE);
+ exec.callStackPush(name);
+ if (procCtx.declare_block_inplace() != null) {
+ visit(procCtx.declare_block_inplace());
+ }
+ if (procCtx.create_routine_params() != null) {
+ setCallParameters(name, ctx, actualParams, procCtx.create_routine_params(), out, exec);
+ }
+ visit(procCtx.procedure_block());
+ exec.callStackPop();
+ exec.leaveScope();
+ for (Map.Entry<String, Var> i : out.entrySet()) { // Set OUT parameters, related to prepared statements.
+ exec.setVariable(i.getKey(), i.getValue());
+ }
+ return true;
+ }
+
+ /**
+ * Set parameters for user-defined function call
+ */
+ public static void setCallParameters(String procName, Expr_func_paramsContext actual,
+ ArrayList<Var> actualValues, Create_routine_paramsContext formal, HashMap<String, Var> out,
+ Exec exec) {
+ if (actual == null || actual.func_param() == null || actualValues == null) {
+ return;
+ }
+ int actualCnt = actualValues.size();
+ int formalCnt = formal.create_routine_param_item().size();
+ if (formalCnt != actualCnt) {
+ throw new ArityException(actual.getParent(), procName, formalCnt, actualCnt);
+ }
+ for (int i = 0; i < actualCnt; i++) {
+ ExprContext a = actual.func_param(i).expr();
+ Create_routine_param_itemContext p = getCallParameter(actual, formal, i);
+ String name = p.ident_pl().getText();
+ String type = p.dtype().getText();
+ String len = null;
+ String scale = null;
+ if (p.dtype_len() != null) {
+ len = p.dtype_len().INTEGER_VALUE(0).getText();
+ if (p.dtype_len().INTEGER_VALUE(1) != null) {
+ scale = p.dtype_len().INTEGER_VALUE(1).getText();
+ }
+ }
+ Var var = setCallParameter(name, type, len, scale, actualValues.get(i), exec);
+ exec.trace(actual, "SET PARAM " + name + " = " + var.toString());
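+ // Remember identifier arguments bound to OUT/INOUT parameters so the caller's variables can be updated after the call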
+ if (out != null && a.expr_atom() != null && a.expr_atom().qident() != null && (p.OUT() != null
+ || p.INOUT() != null)) {
+ String actualName = a.expr_atom().qident().getText();
+ if (actualName != null) {
+ out.put(actualName, var);
+ }
+ }
+ }
+ }
+
+ /**
+ * Create a function or procedure parameter and set its value
+ */
+ static Var setCallParameter(String name, String typeName, String len, String scale, Var value, Exec exec) {
+ TableClass plClass = exec.getType(typeName); // Prioritize matching table name
+ Var var = new Var(name, plClass == null ? typeName : Var.Type.PL_OBJECT.name(), len, scale, null);
+ if (plClass != null) {
+ var.setValue(plClass.newInstance());
+ }
+ var.cast(value); // Set var value
+ exec.addVariable(var);
+ return var;
+ }
+
+ /**
+ * Get call parameter definition by name (if specified) or position
+ */
+ static Create_routine_param_itemContext getCallParameter(Expr_func_paramsContext actual,
+ Create_routine_paramsContext formal, int pos) {
+ String named;
+ int outPos = pos;
+ if (actual.func_param(pos).ident_pl() != null) {
+ named = actual.func_param(pos).ident_pl().getText();
+ int cnt = formal.create_routine_param_item().size();
+ for (int i = 0; i < cnt; i++) {
+ if (named.equalsIgnoreCase(formal.create_routine_param_item(i).ident_pl().getText())) {
+ outPos = i;
+ break;
+ }
+ }
+ }
+ return formal.create_routine_param_item(outPos);
+ }
+
+ /**
+ * Evaluate actual call parameters
+ */
+ public ArrayList<Var> getActualCallParameters(Expr_func_paramsContext actual) {
+ if (actual == null || actual.func_param() == null) {
+ return null;
+ }
+ int cnt = actual.func_param().size();
+ ArrayList<Var> values = new ArrayList<>(cnt);
+ for (int i = 0; i < cnt; i++) {
+ values.add(evalPop(actual.func_param(i).expr()));
+ }
+ return values;
+ }
+
+ @Override
+ public void addUserFunction(Create_function_stmtContext ctx) {
+ FuncNameInfo procedureName = new FuncNameInfo(
+ exec.logicalPlanBuilder.visitMultipartIdentifier(ctx.multipartIdentifier()));
+ if (builtinFunctions.exists(procedureName.toString())) {
+ exec.info(ctx, procedureName.toString() + " is a built-in function which cannot be redefined.");
+ return;
+ }
+ if (trace) {
+ trace(ctx, "CREATE FUNCTION " + procedureName.toString());
+ }
+ funcMap.put(procedureName.toString(), ctx);
+ }
+
+ @Override
+ public void addUserProcedure(Create_procedure_stmtContext ctx) {
+ FuncNameInfo procedureName = new FuncNameInfo(
+ exec.logicalPlanBuilder.visitMultipartIdentifier(ctx.multipartIdentifier()));
+ if (builtinFunctions.exists(procedureName.toString())) {
+ exec.info(ctx, procedureName.toString() + " is a built-in function which cannot be redefined.");
+ return;
+ }
+ if (trace) {
+ trace(ctx, "CREATE PROCEDURE " + procedureName.toString());
+ }
+ procMap.put(procedureName.toString(), ctx);
+ }
+
+ /**
+ * Evaluate the expression and pop value from the stack
+ */
+ private Var evalPop(ParserRuleContext ctx) {
+ exec.visit(ctx);
+ return exec.stackPop();
+ }
+
+ /**
+ * Execute rules
+ */
+ private Integer visit(ParserRuleContext ctx) {
+ return exec.visit(ctx);
+ }
+
+ private void trace(ParserRuleContext ctx, String message) {
+ if (trace) {
+ exec.trace(ctx, message);
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlManager.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlManager.java
new file mode 100644
index 00000000000000..b1ee73e544d464
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlManager.java
@@ -0,0 +1,126 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.metastore;
+
+import org.apache.doris.catalog.Env;
+import org.apache.doris.common.io.Text;
+import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
+
+import com.google.common.collect.Maps;
+import com.google.gson.annotations.SerializedName;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Map;
+
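+ /**
+ * In-memory registry of PL/SQL stored procedures and packages; changes are persisted through the
+ * edit log and replayed on non-Master FEs.
+ */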
+public class PlsqlManager implements Writable {
+ private static final Logger LOG = LogManager.getLogger(PlsqlManager.class);
+
+ @SerializedName(value = "nameToStoredProcedures")
+ Map<PlsqlProcedureKey, PlsqlStoredProcedure> nameToStoredProcedures = Maps.newConcurrentMap();
+
+ @SerializedName(value = "nameToPackages")
+ Map<PlsqlProcedureKey, PlsqlPackage> nameToPackages = Maps.newConcurrentMap();
+
+ public PlsqlManager() {
+ }
+
+ public PlsqlStoredProcedure getPlsqlStoredProcedure(PlsqlProcedureKey plsqlProcedureKey) {
+ return nameToStoredProcedures.get(plsqlProcedureKey);
+ }
+
+ public void addPlsqlStoredProcedure(PlsqlStoredProcedure procedure, boolean isForce) {
+ PlsqlProcedureKey plsqlProcedureKey = new PlsqlProcedureKey(procedure.getName(), procedure.getCatalogName(),
+ procedure.getDbName());
+ if (isForce) {
+ nameToStoredProcedures.put(plsqlProcedureKey, procedure);
+ } else if (nameToStoredProcedures.putIfAbsent(plsqlProcedureKey, procedure) != null) {
+ throw new RuntimeException(plsqlProcedureKey + ", stored procedure already exists.");
+ }
+ Env.getCurrentEnv().getEditLog().logAddPlsqlStoredProcedure(procedure);
+ LOG.info("Add stored procedure success: {}", plsqlProcedureKey);
+ }
+
+ public void replayAddPlsqlStoredProcedure(PlsqlStoredProcedure procedure) {
+ PlsqlProcedureKey plsqlProcedureKey = new PlsqlProcedureKey(procedure.getName(), procedure.getCatalogName(),
+ procedure.getDbName());
+ nameToStoredProcedures.put(plsqlProcedureKey, procedure);
+ LOG.info("Replay add stored procedure success: {}", plsqlProcedureKey);
+ }
+
+ public void dropPlsqlStoredProcedure(PlsqlProcedureKey plsqlProcedureKey) {
+ nameToStoredProcedures.remove(plsqlProcedureKey);
+ Env.getCurrentEnv().getEditLog().logDropPlsqlStoredProcedure(plsqlProcedureKey);
+ LOG.info("Drop stored procedure success: {}", plsqlProcedureKey);
+ }
+
+ public void replayDropPlsqlStoredProcedure(PlsqlProcedureKey plsqlProcedureKey) {
+ nameToStoredProcedures.remove(plsqlProcedureKey);
+ LOG.info("Replay drop stored procedure success: {}", plsqlProcedureKey);
+ }
+
+ public PlsqlPackage getPackage(PlsqlProcedureKey plsqlProcedureKey) {
+ return nameToPackages.get(plsqlProcedureKey);
+ }
+
+ public void addPackage(PlsqlPackage pkg, boolean isForce) {
+ PlsqlProcedureKey plsqlProcedureKey = new PlsqlProcedureKey(pkg.getName(), pkg.getCatalogName(),
+ pkg.getDbName());
+ if (isForce) {
+ nameToPackages.put(plsqlProcedureKey, pkg);
+ } else if (nameToPackages.putIfAbsent(plsqlProcedureKey, pkg) != null) {
+ throw new RuntimeException(plsqlProcedureKey + ", package already exists.");
+ }
+ Env.getCurrentEnv().getEditLog().logAddPlsqlPackage(pkg);
+ LOG.info("Add plsql package success: {}", plsqlProcedureKey);
+ }
+
+ public void replayAddPlsqlPackage(PlsqlPackage pkg) {
+ PlsqlProcedureKey plsqlProcedureKey = new PlsqlProcedureKey(pkg.getName(), pkg.getCatalogName(),
+ pkg.getDbName());
+ nameToPackages.put(plsqlProcedureKey, pkg);
+ LOG.info("Replay add plsql package success: {}", plsqlProcedureKey);
+ }
+
+ public void dropPackage(PlsqlProcedureKey plsqlProcedureKey) {
+ nameToPackages.remove(plsqlProcedureKey);
+ Env.getCurrentEnv().getEditLog().logDropPlsqlPackage(plsqlProcedureKey);
+ LOG.info("Drop plsql package success: {}", plsqlProcedureKey);
+ }
+
+ public void replayDropPlsqlPackage(PlsqlProcedureKey plsqlProcedureKey) {
+ nameToPackages.remove(plsqlProcedureKey);
+ LOG.info("Replay drop plsql package success: {}", plsqlProcedureKey);
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ String json = GsonUtils.GSON.toJson(this);
+ Text.writeString(out, json);
+ }
+
+ public static PlsqlManager read(DataInput in) throws IOException {
+ String json = Text.readString(in);
+ return GsonUtils.GSON.fromJson(json, PlsqlManager.class);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlMetaClient.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlMetaClient.java
new file mode 100644
index 00000000000000..86d4474ca76f1e
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlMetaClient.java
@@ -0,0 +1,207 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.metastore;
+
+import org.apache.doris.catalog.Env;
+import org.apache.doris.common.ClientPool;
+import org.apache.doris.mysql.privilege.PrivPredicate;
+import org.apache.doris.qe.ConnectContext;
+import org.apache.doris.thrift.FrontendService;
+import org.apache.doris.thrift.TAddPlsqlPackageRequest;
+import org.apache.doris.thrift.TAddPlsqlStoredProcedureRequest;
+import org.apache.doris.thrift.TDropPlsqlPackageRequest;
+import org.apache.doris.thrift.TDropPlsqlStoredProcedureRequest;
+import org.apache.doris.thrift.TNetworkAddress;
+import org.apache.doris.thrift.TPlsqlPackage;
+import org.apache.doris.thrift.TPlsqlProcedureKey;
+import org.apache.doris.thrift.TPlsqlStoredProcedure;
+import org.apache.doris.thrift.TStatus;
+import org.apache.doris.thrift.TStatusCode;
+
+import org.apache.thrift.TException;
+
+import java.util.Objects;
+
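+ /**
+ * Entry point for PL/SQL metadata operations; updates are applied directly on the Master FE and
+ * forwarded to the Master over thrift from other FEs.
+ */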
+public class PlsqlMetaClient {
+ public PlsqlMetaClient() {
+ }
+
+ public void addPlsqlStoredProcedure(String name, String catalogName, String dbName, String ownerName, String source,
+ boolean isForce) {
+ checkPriv();
+ if (Env.getCurrentEnv().isMaster()) {
+ Env.getCurrentEnv().getPlsqlManager()
+ .addPlsqlStoredProcedure(new PlsqlStoredProcedure(name, catalogName, dbName, ownerName, source),
+ isForce);
+ } else {
+ addPlsqlStoredProcedureThrift(name, catalogName, dbName, ownerName, source, isForce);
+ }
+ }
+
+ public void dropPlsqlStoredProcedure(String name, String catalogName, String dbName) {
+ checkPriv();
+ if (Env.getCurrentEnv().isMaster()) {
+ Env.getCurrentEnv().getPlsqlManager()
+ .dropPlsqlStoredProcedure(new PlsqlProcedureKey(name, catalogName, dbName));
+ } else {
+ dropStoredProcedureThrift(name, catalogName, dbName);
+ }
+ }
+
+ public PlsqlStoredProcedure getPlsqlStoredProcedure(String name, String catalogName, String dbName) {
+ return Env.getCurrentEnv().getPlsqlManager()
+ .getPlsqlStoredProcedure(new PlsqlProcedureKey(name, catalogName, dbName));
+ }
+
+ public void addPlsqlPackage(String name, String catalogName, String dbName, String ownerName, String header,
+ String body) {
+ checkPriv();
+ if (Env.getCurrentEnv().isMaster()) {
+ Env.getCurrentEnv().getPlsqlManager()
+ .addPackage(new PlsqlPackage(name, catalogName, dbName, ownerName, header, body),
+ false);
+ } else {
+ addPlsqlPackageThrift(name, catalogName, dbName, ownerName, header, body);
+ }
+ }
+
+ public void dropPlsqlPackage(String name, String catalogName, String dbName) {
+ checkPriv();
+ if (Env.getCurrentEnv().isMaster()) {
+ Env.getCurrentEnv().getPlsqlManager().dropPackage(new PlsqlProcedureKey(name, catalogName, dbName));
+ } else {
+ dropPlsqlPackageThrift(name, catalogName, dbName);
+ }
+ }
+
+ public PlsqlPackage getPlsqlPackage(String name, String catalogName, String dbName) {
+ return Env.getCurrentEnv().getPlsqlManager().getPackage(new PlsqlProcedureKey(name, catalogName, dbName));
+ }
+
+ protected void addPlsqlStoredProcedureThrift(String name, String catalogName, String dbName, String ownerName,
+ String source, boolean isForce) {
+ TPlsqlStoredProcedure tPlsqlStoredProcedure = new TPlsqlStoredProcedure().setName(name)
+ .setCatalogName(catalogName)
+ .setDbName(dbName).setOwnerName(ownerName).setSource(source);
+ TAddPlsqlStoredProcedureRequest tAddPlsqlStoredProcedureRequest = new TAddPlsqlStoredProcedureRequest()
+ .setPlsqlStoredProcedure(tPlsqlStoredProcedure);
+ tAddPlsqlStoredProcedureRequest.setIsForce(isForce);
+
+ try {
+ sendUpdateRequest(tAddPlsqlStoredProcedureRequest,
+ (request, client) -> client.addPlsqlStoredProcedure(request).getStatus());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ protected void dropStoredProcedureThrift(String name, String catalogName, String dbName) {
+ TPlsqlProcedureKey tPlsqlProcedureKey = new TPlsqlProcedureKey().setName(name).setCatalogName(catalogName)
+ .setDbName(dbName);
+ TDropPlsqlStoredProcedureRequest tDropPlsqlStoredProcedureRequest
+ = new TDropPlsqlStoredProcedureRequest().setPlsqlProcedureKey(
+ tPlsqlProcedureKey);
+
+ try {
+ sendUpdateRequest(tDropPlsqlStoredProcedureRequest,
+ (request, client) -> client.dropPlsqlStoredProcedure(request).getStatus());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ protected void addPlsqlPackageThrift(String name, String catalogName, String dbName, String ownerName,
+ String header, String body) {
+ TPlsqlPackage tPlsqlPackage = new TPlsqlPackage().setName(name).setCatalogName(catalogName)
+ .setDbName(dbName).setOwnerName(ownerName).setHeader(header).setBody(body);
+ TAddPlsqlPackageRequest tAddPlsqlPackageRequest = new TAddPlsqlPackageRequest()
+ .setPlsqlPackage(tPlsqlPackage);
+
+ try {
+ sendUpdateRequest(tAddPlsqlPackageRequest,
+ (request, client) -> client.addPlsqlPackage(request).getStatus());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ protected void dropPlsqlPackageThrift(String name, String catalogName, String dbName) {
+ TPlsqlProcedureKey tPlsqlProcedureKey = new TPlsqlProcedureKey().setName(name).setCatalogName(catalogName)
+ .setDbName(dbName);
+ TDropPlsqlPackageRequest tDropPlsqlPackageRequest = new TDropPlsqlPackageRequest().setPlsqlProcedureKey(
+ tPlsqlProcedureKey);
+
+ try {
+ sendUpdateRequest(tDropPlsqlPackageRequest,
+ (request, client) -> client.dropPlsqlPackage(request).getStatus());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private void checkPriv() {
+ if (!Env.getCurrentEnv().getAccessManager()
+ .checkGlobalPriv(ConnectContext.get().getCurrentUserIdentity(), PrivPredicate.ADMIN)) {
+ throw new RuntimeException(
+ "Access denied; you need (at least one of) the ADMIN privilege(s) for this operation");
+ }
+ }
+
+ private <Request> void sendUpdateRequest(Request request,
+ BiFunction<Request, FrontendService.Client, TStatus> sendRequest) throws Exception {
+ TNetworkAddress masterAddress = new TNetworkAddress(Env.getCurrentEnv().getMasterHost(),
+ Env.getCurrentEnv().getMasterRpcPort());
+ FrontendService.Client client = ClientPool.frontendPool.borrowObject(masterAddress);
+ TStatus status;
+ boolean isReturnToPool = true;
+ try {
+ status = sendRequest.apply(request, client);
+ checkResult(status);
+ } catch (Exception e) {
+ if (!ClientPool.frontendPool.reopen(client)) {
+ isReturnToPool = false;
+ throw e;
+ }
+
+ status = sendRequest.apply(request, client); // retry once
+ checkResult(status);
+ } finally {
+ if (isReturnToPool) {
+ ClientPool.frontendPool.returnObject(masterAddress, client);
+ } else {
+ ClientPool.frontendPool.invalidateObject(masterAddress, client);
+ }
+ }
+ }
+
+ private void checkResult(TStatus status) throws Exception {
+ if (Objects.isNull(status) || !status.isSetStatusCode()) {
+ throw new TException("Access master error, no status set.");
+ }
+ if (status.getStatusCode().equals(TStatusCode.OK)) {
+ return;
+ }
+ throw new Exception(
+ "Access fe error, code:" + status.getStatusCode().name() + ", mgs:" + status.getErrorMsgs());
+ }
+
+ @FunctionalInterface
+ public interface BiFunction<T, U, R> {
+ R apply(T t, U u) throws Exception;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlPackage.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlPackage.java
new file mode 100644
index 00000000000000..41114890c5837b
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlPackage.java
@@ -0,0 +1,74 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.metastore;
+
+import org.apache.doris.common.io.Text;
+import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
+import org.apache.doris.thrift.TPlsqlPackage;
+
+import com.google.gson.annotations.SerializedName;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+@AllArgsConstructor
+@Getter
+public class PlsqlPackage implements Writable {
+ @SerializedName(value = "name")
+ private String name;
+
+ @SerializedName(value = "catalogName")
+ private String catalogName;
+
+ @SerializedName(value = "dbName")
+ private String dbName;
+
+ @SerializedName(value = "ownerName")
+ private String ownerName;
+
+ @SerializedName(value = "header")
+ private String header;
+
+ @SerializedName(value = "body")
+ private String body;
+
+ public static PlsqlPackage read(DataInput in) throws IOException {
+ String json = Text.readString(in);
+ return GsonUtils.GSON.fromJson(json, PlsqlPackage.class);
+ }
+
+ public TPlsqlPackage toThrift() {
+ return new TPlsqlPackage().setName(name).setCatalogName(catalogName).setDbName(dbName).setOwnerName(ownerName)
+ .setHeader(header).setBody(body);
+ }
+
+ public static PlsqlPackage fromThrift(TPlsqlPackage pkg) {
+ return new PlsqlPackage(pkg.getName(), pkg.getCatalogName(), pkg.getDbName(), pkg.getOwnerName(),
+ pkg.getHeader(), pkg.getBody());
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ String json = GsonUtils.GSON.toJson(this);
+ Text.writeString(out, json);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlProcedureKey.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlProcedureKey.java
new file mode 100644
index 00000000000000..c2472c073828f1
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlProcedureKey.java
@@ -0,0 +1,90 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.metastore;
+
+import org.apache.doris.common.io.Text;
+import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
+import org.apache.doris.thrift.TPlsqlProcedureKey;
+
+import com.google.gson.annotations.SerializedName;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Objects;
+
+public class PlsqlProcedureKey implements Writable {
+ private static final Logger LOG = LogManager.getLogger(PlsqlProcedureKey.class);
+
+ @SerializedName(value = "name")
+ private String name;
+
+ @SerializedName(value = "catalogName")
+ private String catalogName;
+
+ @SerializedName(value = "dbName")
+ private String dbName;
+
+ public PlsqlProcedureKey(String name, String catalogName, String dbName) {
+ this.name = name;
+ this.catalogName = catalogName;
+ this.dbName = dbName;
+ }
+
+ public TPlsqlProcedureKey toThrift() {
+ return new TPlsqlProcedureKey().setName(name).setCatalogName(catalogName).setDbName(dbName);
+ }
+
+ public static PlsqlProcedureKey fromThrift(TPlsqlProcedureKey key) {
+ return new PlsqlProcedureKey(key.getName(), key.getCatalogName(), key.getDbName());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, catalogName, dbName);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof PlsqlProcedureKey)) {
+ return false;
+ }
+ return Objects.equals(this.name, ((PlsqlProcedureKey) obj).name) && Objects.equals(this.catalogName,
+ ((PlsqlProcedureKey) obj).catalogName)
+ && Objects.equals(this.dbName, ((PlsqlProcedureKey) obj).dbName);
+ }
+
+ @Override
+ public String toString() {
+ return "name:" + name + ", catalogName:" + catalogName + ", dbName:" + dbName;
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ String json = GsonUtils.GSON.toJson(this);
+ Text.writeString(out, json);
+ }
+
+ public static PlsqlProcedureKey read(DataInput in) throws IOException {
+ String json = Text.readString(in);
+ return GsonUtils.GSON.fromJson(json, PlsqlProcedureKey.class);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlStoredProcedure.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlStoredProcedure.java
new file mode 100644
index 00000000000000..b181b19473d5a5
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/metastore/PlsqlStoredProcedure.java
@@ -0,0 +1,71 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.plsql.metastore;
+
+import org.apache.doris.common.io.Text;
+import org.apache.doris.common.io.Writable;
+import org.apache.doris.persist.gson.GsonUtils;
+import org.apache.doris.thrift.TPlsqlStoredProcedure;
+
+import com.google.gson.annotations.SerializedName;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
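+/**
+ * A PL/SQL stored procedure definition: the procedure name, the catalog/database it lives in,
+ * its owner, and the full source text. Like the other plsql metastore objects, it is persisted
+ * as JSON via {@link GsonUtils} and converted to/from its Thrift counterpart for RPC.
+ */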
+@AllArgsConstructor
+@Getter
+public class PlsqlStoredProcedure implements Writable {
+ @SerializedName(value = "name")
+ private String name;
+
+ @SerializedName(value = "catalogName")
+ private String catalogName;
+
+ @SerializedName(value = "dbName")
+ private String dbName;
+
+ @SerializedName(value = "ownerName")
+ private String ownerName;
+
+ @SerializedName(value = "source")
+ private String source;
+
+ public TPlsqlStoredProcedure toThrift() {
+ return new TPlsqlStoredProcedure().setName(name).setCatalogName(catalogName).setDbName(dbName)
+ .setOwnerName(ownerName).setSource(source);
+ }
+
+ public static PlsqlStoredProcedure fromThrift(TPlsqlStoredProcedure procedure) {
+ return new PlsqlStoredProcedure(procedure.getName(), procedure.getCatalogName(), procedure.getDbName(),
+ procedure.getOwnerName(), procedure.getSource());
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ String json = GsonUtils.GSON.toJson(this);
+ Text.writeString(out, json);
+ }
+
+ public static PlsqlStoredProcedure read(DataInput in) throws IOException {
+ String json = Text.readString(in);
+ return GsonUtils.GSON.fromJson(json, PlsqlStoredProcedure.class);
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/DbmOutput.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/DbmOutput.java
new file mode 100644
index 00000000000000..e7377f25364dfb
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/DbmOutput.java
@@ -0,0 +1,51 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/DbmOutput.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
+import org.apache.doris.plsql.Console;
+import org.apache.doris.plsql.Var;
+
+import java.util.List;
+
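+/**
+ * PL/SQL object backing the put_line method registered in {@link DbmOutputClass},
+ * presumably mirroring Oracle's DBMS_OUTPUT.PUT_LINE: each call prints its single
+ * argument to the session {@link Console}.
+ */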
+public class DbmOutput implements PlObject {
+ private final PlClass plClass;
+ private Console console;
+
+ public DbmOutput(PlClass plClass) {
+ this.plClass = plClass;
+ }
+
+ public void initialize(Console console) {
+ this.console = console;
+ }
+
+ @Override
+ public PlClass plClass() {
+ return plClass;
+ }
+
+ public Var putLine(List<Var> params) {
+ if (!params.isEmpty()) {
+ console.printLine(params.get(0).toString());
+ }
+ return null;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/DbmOutputClass.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/DbmOutputClass.java
new file mode 100644
index 00000000000000..769abebdffd850
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/DbmOutputClass.java
@@ -0,0 +1,46 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/DbmOutputClass.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
+import org.apache.doris.plsql.objects.MethodParams.Arity;
+
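+/**
+ * Singleton {@link PlClass} describing {@link DbmOutput}: it exposes a single method,
+ * put_line, whose arity is checked before delegating to {@link DbmOutput#putLine}.
+ */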
+public class DbmOutputClass implements PlClass {
+ public static final DbmOutputClass INSTANCE = new DbmOutputClass();
+ private final MethodDictionary<DbmOutput> methodDictionary = new MethodDictionary<>();
+
+ private DbmOutputClass() {
+ methodDictionary.put("put_line", (self, args) -> {
+ Arity.UNARY.check("put_line", args);
+ return self.putLine(args);
+ });
+ }
+
+ @Override
+ public DbmOutput newInstance() {
+ return new DbmOutput(this);
+ }
+
+ @Override
+ public MethodDictionary<DbmOutput> methodDictionary() {
+ return methodDictionary;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/Method.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/Method.java
new file mode 100644
index 00000000000000..a3c9a68944a85d
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/Method.java
@@ -0,0 +1,29 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/Method.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
+import org.apache.doris.plsql.Var;
+
+import java.util.List;
+
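+/**
+ * A method bound to a PL/SQL object type: invoked with the receiver instance and the
+ * evaluated argument list, returning the result as a {@link Var}, or null when there is
+ * nothing to return.
+ */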
+public interface Method<T extends PlObject> {
+ Var call(T self, List<Var> args);
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/MethodDictionary.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/MethodDictionary.java
new file mode 100644
index 00000000000000..6f919a6dd814aa
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/MethodDictionary.java
@@ -0,0 +1,46 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/MethodDictionary.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
+import org.apache.doris.plsql.exception.NoSuchPlMethodException;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+import java.util.HashMap;
+import java.util.Map;
+
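+/**
+ * Registry of the methods available on a PL/SQL object type. Names are stored and looked up
+ * upper-cased, so method resolution is case-insensitive; looking up an unknown name raises
+ * {@link NoSuchPlMethodException} with the offending parser context.
+ */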
+public class MethodDictionary<T extends PlObject> {
+ public static final String __GETITEM__ = "__GETITEM__";
+ public static final String __SETITEM__ = "__SETITEM__";
+ private final Map<String, Method<T>> dict = new HashMap<>();
+
+ public void put(String methodName, Method<T> method) {
+ dict.put(methodName.toUpperCase(), method);
+ }
+
+ public Method<T> get(ParserRuleContext ctx, String methodName) {
+ Method<T> result = dict.get(methodName.toUpperCase());
+ if (result == null) {
+ throw new NoSuchPlMethodException(ctx, "No such method " + methodName);
+ }
+ return result;
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/MethodParams.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/MethodParams.java
new file mode 100644
index 00000000000000..aef072f3b53da7
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/MethodParams.java
@@ -0,0 +1,96 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/MethodParams.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
+import org.apache.doris.plsql.File;
+import org.apache.doris.plsql.Row;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.exception.ArityException;
+import org.apache.doris.plsql.exception.TypeException;
+
+import java.util.List;
+
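+/**
+ * Wrapper around the actual argument list of a method call. The {@link Arity} passed to the
+ * constructor validates the argument count, and the typed accessors (longAt, stringAt, ...)
+ * cast individual arguments, raising {@link TypeException} on a mismatch.
+ */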
+public class MethodParams {
+ private final List<Var> actual;
+
+ public MethodParams(String methodName, List<Var> actual, Arity arity) {
+ this.actual = actual;
+ arity.check(methodName, actual);
+ }
+
+ public Long longAt(int nth) {
+ return at(nth, Long.class);
+ }
+
+ public Row rowAt(int nth) {
+ return at(nth, Row.class);
+ }
+
+ public String stringAt(int nth) {
+ return at(nth, String.class);
+ }
+
+ public File fileAt(int nth) {
+ return at(nth, File.class);
+ }
+
+ public <T> T at(int nth, Class<T> clazz) {
+ try {
+ return clazz.cast(actual.get(nth).value);
+ } catch (ClassCastException e) {
+ throw new TypeException(null, clazz, actual.get(nth).type, actual.get(nth).value);
+ }
+ }
+
+ public interface Arity {
+ void check(String methodName, List<Var> params);
+
+ Arity NULLARY = Arity.of(0);
+ Arity UNARY = Arity.of(1);
+ Arity BINARY = Arity.of(2);
+
+ static Arity of(int count) {
+ return (methodName, params) -> {
+ if (params.size() != count) {
+ throw new ArityException(null, methodName, count, params.size());
+ }
+ };
+ }
+
+ static Arity min(int count) {
+ return (methodName, params) -> {
+ if (params.size() < count) {
+ throw new ArityException(null, "wrong number of arguments in call to '" + methodName
+ + "'. Expected at least " + count + " got " + params.size() + ".");
+ }
+ };
+ }
+
+ static Arity max(int count) {
+ return (methodName, params) -> {
+ if (params.size() > count) {
+ throw new ArityException(null, "wrong number of arguments in call to '" + methodName
+ + "'. Expected at most " + count + " got " + params.size() + ".");
+ }
+ };
+ }
+ }
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/PlClass.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/PlClass.java
new file mode 100644
index 00000000000000..38f8a39b2db23a
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/PlClass.java
@@ -0,0 +1,27 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/HplClass.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
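+/**
+ * Type descriptor of a PL/SQL object: a factory for new instances plus the dictionary of
+ * methods callable on those instances.
+ */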
+public interface PlClass {
+ PlObject newInstance();
+
+ MethodDictionary methodDictionary();
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/PlObject.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/PlObject.java
new file mode 100644
index 00000000000000..1216cd319bc241
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/PlObject.java
@@ -0,0 +1,25 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/HplObject.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
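+/**
+ * An instance of a PL/SQL object; it only needs to expose the {@link PlClass} it was created from.
+ */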
+public interface PlObject {
+ PlClass plClass();
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/Table.java b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/Table.java
new file mode 100644
index 00000000000000..f24bff36e1216a
--- /dev/null
+++ b/fe/fe-core/src/main/java/org/apache/doris/plsql/objects/Table.java
@@ -0,0 +1,225 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+// This file is copied from
+// https://github.com/apache/hive/blob/master/hplsql/src/main/java/org/apache/hive/hplsql/objects/Table.java
+// and modified by Doris
+
+package org.apache.doris.plsql.objects;
+
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.plsql.ColumnDefinition;
+import org.apache.doris.plsql.Row;
+import org.apache.doris.plsql.Var;
+import org.apache.doris.plsql.executor.QueryResult;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Oracle's PL/SQL Table/associative array.
+ *
+ * Tables can be modelled after a corresponding Hive table or they can be created manually.
+ *
+ * 1. Model the table after the emp Hive table
+ * TYPE t_tab IS TABLE OF emp%ROWTYPE INDEX BY BINARY_INTEGER;
+ *
+ * 2. Model the table after a column of a Hive table (emp.name). This table will hold a single column only.
+ * TYPE t_tab IS TABLE OF emp.name%TYPE INDEX BY BINARY_INTEGER;
+ *
+ * 3. Or you can specify the column manually. This table will hold one column only.
+ * TYPE t_tab IS TABLE OF NUMBER INDEX BY BINARY_INTEGER;
+ *
+ * In the first case the values will be records where each key in the record matches a column of the corresponding
+ * table.
+ * tab(key).col_name;
+ *
+ * In the last two cases the values will represent scalars, but they are stored in a record with a single key.
+ * tab(key)
+ *
+ * Iteration logic uses the first/last and next/prior methods.
+ * First/last return a key; next/prior give back the next or previous key based on the key passed in.
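+ *
+ * For example (illustrative only, not part of this patch), iterating an associative array with FIRST/NEXT:
+ *   DECLARE
+ *     TYPE t_tab IS TABLE OF NUMBER INDEX BY BINARY_INTEGER;
+ *     tab t_tab;
+ *     k BINARY_INTEGER;
+ *   BEGIN
+ *     tab(1) := 10;
+ *     tab(3) := 30;
+ *     k := tab.FIRST;
+ *     WHILE k IS NOT NULL LOOP
+ *       DBMS_OUTPUT.PUT_LINE(tab(k));
+ *       k := tab.NEXT(k);
+ *     END LOOP;
+ *   END;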
+ */
+public class Table implements PlObject {
+ private final TableClass plClass;
+ private final Map