Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/en/administrator-guide/export-manual.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ WITH BROKER "hdfs"
```

* `column_separator`: Column separator. The default is `\t`. Supports invisible characters, such as '\x07'.
* `columns`: Columns to be exported, separated by commas. If this parameter is not specified, all columns of the table are exported by default.
* `line_delimiter`: Line separator. The default is `\n`. Supports invisible characters, such as '\x07'.
* `exec_mem_limit`: Represents the memory usage limitation of a query plan on a single BE in an Export job. Default 2GB. Unit bytes.
* `timeout`: homework timeout. Default 2 hours. Unit seconds.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ under the License.

The following parameters can be specified:
column_separator: Specifies the exported column separator, defaulting to \t. Supports invisible characters, such as '\x07'.
columns: Specifies the columns to be exported, separated by commas. If this parameter is not specified, all columns of the table are exported by default.
line_delimiter: Specifies the exported line separator, defaulting to \n. Supports invisible characters, such as '\x07'.
exec_mem_limit: Exports the upper limit of memory usage for a single BE node, defaulting to 2GB in bytes.
timeout: The timeout of the export job, 1 day by default, in seconds.
Expand Down Expand Up @@ -92,5 +93,8 @@ under the License.
6. Export all data in the testTbl table to hdfs, using the invisible character "\x07" as the column and row separator.
EXPORT TABLE testTbl TO "hdfs://hdfs_host:port/a/b/c" PROPERTIES ("column_separator"="\\x07", "line_delimiter" = "\\x07") WITH BROKER "broker_name" ("username"="xxx", "password"="yyy")

7. Export columns k1 and v1 of table testTbl to local storage.
EXPORT TABLE testTbl TO "file:///home/data/a" PROPERTIES ("columns" = "k1,v1");

## keyword
EXPORT
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ under the License.

可以指定如下参数:
column_separator: 指定导出的列分隔符,默认为\t。支持不可见字符,比如 '\x07'。
columns: 指定待导出的列,使用英文逗号隔开,如果不填这个参数默认是导出表的所有列。
line_delimiter: 指定导出的行分隔符,默认为\n。支持不可见字符,比如 '\x07'。
exec_mem_limit: 导出在单个 BE 节点的内存使用上限,默认为 2GB,单位为字节。
timeout:导出作业的超时时间,默认为1天,单位是秒。
Expand Down Expand Up @@ -91,6 +92,9 @@ under the License.

6. 将 testTbl 表中的所有数据导出到 hdfs 上,以不可见字符 "\x07" 作为列或者行分隔符。
EXPORT TABLE testTbl TO "hdfs://hdfs_host:port/a/b/c" PROPERTIES ("column_separator"="\\x07", "line_delimiter" = "\\x07") WITH BROKER "broker_name" ("username"="xxx", "password"="yyy")

7. 将 testTbl 表的 k1, v1 列导出到本地。
EXPORT TABLE testTbl TO "file:///home/data/a" PROPERTIES ("columns" = "k1,v1");

## keyword
EXPORT
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ public boolean needAuditEncryption() {
}

@Override
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
public void analyze(Analyzer analyzer) throws UserException {
super.analyze(analyzer);

tableRef = analyzer.resolveTableRef(tableRef);
Expand Down
28 changes: 16 additions & 12 deletions fe/fe-core/src/main/java/org/apache/doris/load/ExportJob.java
Original file line number Diff line number Diff line change
Expand Up @@ -78,11 +78,11 @@
import org.apache.doris.thrift.TUniqueId;

import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.base.Splitter;

import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
Expand Down Expand Up @@ -268,18 +268,22 @@ private void registerToDesc() {
exportTupleDesc = desc.createTupleDescriptor();
exportTupleDesc.setTable(exportTable);
exportTupleDesc.setRef(tableRef);
for (Column col : exportTable.getBaseSchema()) {
String colName = col.getName().toLowerCase();
if (!this.exportColumns.isEmpty() && this.exportColumns.contains(colName)) {
SlotDescriptor slot = desc.addSlotDescriptor(exportTupleDesc);
slot.setIsMaterialized(true);
slot.setColumn(col);
slot.setIsNullable(col.isAllowNull());
} else {
if (exportColumns.isEmpty()) {
for (Column column : exportTable.getBaseSchema()) {
SlotDescriptor slot = desc.addSlotDescriptor(exportTupleDesc);
slot.setIsMaterialized(true);
slot.setColumn(col);
slot.setIsNullable(col.isAllowNull());
slot.setColumn(column);
slot.setIsNullable(column.isAllowNull());
}
} else {
for (Column column : exportTable.getBaseSchema()) {
String colName = column.getName().toLowerCase();
if (exportColumns.contains(colName)) {
SlotDescriptor slot = desc.addSlotDescriptor(exportTupleDesc);
slot.setIsMaterialized(true);
slot.setColumn(column);
slot.setIsNullable(column.isAllowNull());
}
}
}
desc.computeMemLayout();
Expand Down Expand Up @@ -765,7 +769,7 @@ public void readFields(DataInput in) throws IOException {
this.columns = this.properties.get(LoadStmt.KEY_IN_PARAM_COLUMNS);
if (!Strings.isNullOrEmpty(this.columns)) {
Splitter split = Splitter.on(',').trimResults().omitEmptyStrings();
this.exportColumns = split.splitToList(this.columns);
this.exportColumns = split.splitToList(this.columns.toLowerCase());
}
boolean hasPartition = in.readBoolean();
if (hasPartition) {
Expand Down