@@ -100,7 +100,7 @@ public void analyze(Analyzer analyzer) throws AnalysisException {
} else {
// TODO(wyb): spark-load
if (!Config.enable_spark_load) {
throw new AnalysisException("REVOKE ON RESOURCE is comming soon");
throw new AnalysisException("REVOKE ON RESOURCE is coming soon");
}
resourcePattern.analyze();
}

@@ -260,7 +260,7 @@ public TupleId getId() {
}

/**
-* Return the list of of materialized tuple ids from the TableRef.
+* Return the list of materialized tuple ids from the TableRef.
* This method should only be called after the TableRef has been analyzed.
*/
public List<TupleId> getMaterializedTupleIds() {

@@ -658,7 +658,7 @@ public TPrimitiveType getTPrimitiveType(TTypeDesc ttype) {
* For datetime types this is the length in characters of the String representation
* (assuming the maximum allowed precision of the fractional seconds component).
* For binary data this is the length in bytes.
-* Null is returned for for data types where the column size is not applicable.
+* Null is returned for data types where the column size is not applicable.
*/
public Integer getColumnSize() {
if (!isScalarType()) return null;
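
The rules in this comment translate directly into a small lookup. Below is a minimal, self-contained toy (not the engine's actual Type class; the type set and sizes are simplified stand-ins) showing how each rule maps to a returned size or to null:

    import java.util.Arrays;

    public class ColumnSizeDemo {
        enum Scalar { INT, VARCHAR, DATETIME, HLL }

        // Column size per the rules above, or null where the concept does not apply.
        static Integer columnSize(Scalar t) {
            switch (t) {
                case INT:      return 10;    // max decimal digits of a 32-bit int
                case VARCHAR:  return 65533; // assumed max length in characters
                case DATETIME: return 19;    // "yyyy-MM-dd HH:mm:ss"; 26 with .ffffff
                default:       return null;  // not applicable, e.g. for HLL
            }
        }

        public static void main(String[] args) {
            Arrays.stream(Scalar.values())
                  .forEach(t -> System.out.println(t + " -> " + columnSize(t)));
        }
    }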

@@ -947,7 +947,7 @@ public enum ErrorCode {
"past and ON COMPLETION NOT PRESERVE is set. The event was dropped immediately after creation."),
ERR_EVENT_CANNOT_ALTER_IN_THE_PAST(1589, new byte[]{'H', 'Y', '0', '0', '0'}, "Event execution time is in the " +
"past and ON COMPLETION NOT PRESERVE is set. The event was dropped immediately after creation."),
-ERR_SLAVE_INCIDENT(1590, new byte[]{'H', 'Y', '0', '0', '0'}, "The incident %s occured on the master. Message: %s"),
+ERR_SLAVE_INCIDENT(1590, new byte[]{'H', 'Y', '0', '0', '0'}, "The incident %s occurred on the master. Message: %s"),
ERR_NO_PARTITION_FOR_GIVEN_VALUE_SILENT(1591, new byte[]{'H', 'Y', '0', '0', '0'}, "Table has no partition for " +
"some existing values"),
ERR_BINLOG_UNSAFE_STATEMENT(1592, new byte[]{'H', 'Y', '0', '0', '0'}, "Unsafe statement written to the binary " +
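
Every entry in this enum follows the same shape: a numeric code, a five-byte SQLSTATE, and a printf-style message template whose placeholders are filled when the error is raised. A minimal sketch of that pattern (a standalone demo, not the real ErrorCode enum):

    public enum DemoErrorCode {
        ERR_SLAVE_INCIDENT(1590, new byte[]{'H', 'Y', '0', '0', '0'},
                "The incident %s occurred on the master. Message: %s");

        private final int code;
        private final byte[] sqlState;
        private final String template;

        DemoErrorCode(int code, byte[] sqlState, String template) {
            this.code = code;
            this.sqlState = sqlState;
            this.template = template;
        }

        public String format(Object... args) {
            return String.format(template, args);
        }

        public static void main(String[] args) {
            // -> "The incident LOST_EVENTS occurred on the master. Message: binlog gap"
            System.out.println(ERR_SLAVE_INCIDENT.format("LOST_EVENTS", "binlog gap"));
        }
    }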

@@ -165,7 +165,7 @@ public Object submit(

/**
* Get all uploaded file or specified file
-* If preview is true, also return the the preview of the file
+* If preview is true, also return the preview of the file
* @param ns
* @param dbName
* @param tblName

@@ -370,7 +370,7 @@ private Set<Long> submitPushTasks(LoadJob job, Database db) {
OlapTable table = (OlapTable) db.getTableNullable(tableId);
if (table == null) {
LOG.warn("table does not exist. id: {}", tableId);
-// if table is dropped during load, the the job is failed
+// if table is dropped during load, the job is failed
return null;
}
TableLoadInfo tableLoadInfo = tableEntry.getValue();

@@ -110,7 +110,7 @@ private static byte[] xorCrypt(byte[] s1, byte[] s2) {
}

// Check that scrambled message corresponds to the password; the function
-// is used by server to check that recieved reply is authentic.
+// is used by server to check that received reply is authentic.
// This function does not check lengths of given strings: message must be
// null-terminated, reply and hash_stage2 must be at least SHA1_HASH_SIZE
// long (if not, something fishy is going on).
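
The check described here is the classic mysql_native_password verification: XOR the client's reply with SHA1(message + hash_stage2) to recover a candidate SHA1(password), then confirm that hashing the candidate reproduces hash_stage2. A minimal sketch under that reading (an illustration, not this class's actual method):

    import java.security.MessageDigest;
    import java.util.Arrays;

    public class ScrambleCheckDemo {
        static final int SHA1_HASH_SIZE = 20;

        // True iff `reply` proves knowledge of the password whose double
        // SHA-1 hash is `hashStage2`, given the random scramble `message`.
        static boolean checkScramble(byte[] message, byte[] reply, byte[] hashStage2)
                throws Exception {
            MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
            sha1.update(message);
            sha1.update(hashStage2);
            byte[] tmp = sha1.digest();               // SHA1(message + hash_stage2)
            byte[] stage1 = new byte[SHA1_HASH_SIZE]; // candidate SHA1(password)
            for (int i = 0; i < SHA1_HASH_SIZE; i++) {
                stage1[i] = (byte) (tmp[i] ^ reply[i]);
            }
            return Arrays.equals(MessageDigest.getInstance("SHA-1").digest(stage1), hashStage2);
        }

        public static void main(String[] args) throws Exception {
            byte[] stage1 = MessageDigest.getInstance("SHA-1").digest("secret".getBytes());
            byte[] stage2 = MessageDigest.getInstance("SHA-1").digest(stage1);
            byte[] message = new byte[SHA1_HASH_SIZE]; // fixed scramble, demo only
            MessageDigest d = MessageDigest.getInstance("SHA-1");
            d.update(message);
            d.update(stage2);
            byte[] tmp = d.digest();
            byte[] reply = new byte[SHA1_HASH_SIZE];  // what an honest client would send
            for (int i = 0; i < SHA1_HASH_SIZE; i++) {
                reply[i] = (byte) (stage1[i] ^ tmp[i]);
            }
            System.out.println(checkScramble(message, reply, stage2)); // true
        }
    }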

@@ -1378,7 +1378,7 @@ private PlanNode createInlineViewPlan(Analyzer analyzer, InlineViewRef inlineVie
rootNode.setWithoutTupleIsNullOutputSmap(outputSmap);
// Exprs against non-matched rows of an outer join should always return NULL.
// Make the rhs exprs of the output smap nullable, if necessary. This expr wrapping
-// must be performed on the composed smap, and not on the the inline view's smap,
+// must be performed on the composed smap, and not on the inline view's smap,
// because the rhs exprs must first be resolved against the physical output of
// 'planRoot' to correctly determine whether wrapping is necessary.
List<Expr> nullableRhs = TupleIsNullPredicate.wrapExprs(
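
For intuition: in a hypothetical query such as SELECT t1.id, v.c FROM t1 LEFT JOIN (SELECT id, 1 AS c FROM t2) v ON t1.id = v.id, a t1 row with no match must yield NULL for v.c even though the view defines c as the constant 1; wrapping the substituted exprs in TupleIsNullPredicate is what enforces that NULL.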

@@ -254,8 +254,8 @@ public void init(Analyzer analyzer) throws UserException {

// Remap the ordering exprs to the tuple materialized by this sort node. The mapping
// is a composition of the childSmap and the outputSmap_ because the child node may
-// have also remapped its input (e.g., as in a a series of (sort->analytic)* nodes).
-// Parent nodes have have to do the same so set the composition as the outputSmap_.
+// have also remapped its input (e.g., as in a series of (sort->analytic)* nodes).
+// Parent nodes have to do the same so set the composition as the outputSmap_.
outputSmap = ExprSubstitutionMap.compose(childSmap, outputSmap, analyzer);
info.substituteOrderingExprs(outputSmap, analyzer);
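
The composition here behaves like ordinary function composition over substitution maps: childSmap is applied first, then outputSmap. A simplified illustration using plain strings in place of Exprs (ExprSubstitutionMap.compose itself operates on expression trees):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class SmapComposeDemo {
        // compose(f, g): route every mapping of f through g (x -> g(f(x))),
        // then keep any mapping of g whose lhs f does not already cover.
        static Map<String, String> compose(Map<String, String> f, Map<String, String> g) {
            Map<String, String> out = new LinkedHashMap<>();
            f.forEach((lhs, rhs) -> out.put(lhs, g.getOrDefault(rhs, rhs)));
            g.forEach(out::putIfAbsent);
            return out;
        }

        public static void main(String[] args) {
            Map<String, String> childSmap = Map.of("col", "child_slot");
            Map<String, String> outputSmap = Map.of("child_slot", "sort_slot");
            // col now resolves all the way to the sort node's slot:
            // {col=sort_slot, child_slot=sort_slot}
            System.out.println(compose(childSmap, outputSmap));
        }
    }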


@@ -99,7 +99,7 @@ public int getRpcPort() {
private boolean isInit = false;

// init the fe process. This must be called before starting the frontend process.
-// 1. check if all neccessary environment variables are set.
+// 1. check if all necessary environment variables are set.
// 2. clear and create 3 dirs: runningDir/log/, runningDir/palo-meta/, runningDir/conf/
// 3. init fe.conf
// The content of "fe.conf" is a merge set of input `feConf` and MIN_FE_CONF

@@ -989,7 +989,7 @@ private void process() throws Exception {
}
}

-// get key column names and value column names seperately
+// get key column names and value column names separately
List<String> keyColumnNames = new ArrayList<>();
List<String> valueColumnNames = new ArrayList<>();
for (EtlJobConfig.EtlColumn etlColumn : baseIndex.columns) {
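
The loop body is collapsed in this diff view. A hedged guess at its shape, with isKey and columnName as assumed field names on EtlColumn:

    // assumed split: route each column name by its key flag
    if (etlColumn.isKey) {
        keyColumnNames.add(etlColumn.columnName);
    } else {
        valueColumnNames.add(etlColumn.columnName);
    }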