Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions docs/content/migration/iceberg-compatibility.md
Original file line number Diff line number Diff line change
Expand Up @@ -389,6 +389,12 @@ you also need to set some (or all) of the following table options when creating
<td>String</td>
<td>Hive client class name for Iceberg Hive Catalog.</td>
</tr>
<tr>
<td><h5>metadata.iceberg.glue.skip-archive</h5></td>
<td style="word-wrap: break-word;">false</td>
<td>Boolean</td>
<td>Whether to skip archiving the previous table version when committing a new metadata version to an AWS Glue catalog.</td>
</tr>
</tbody>
</table>

Expand Down
6 changes: 6 additions & 0 deletions docs/layouts/shortcodes/generated/iceberg_configuration.html
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,12 @@
<td>String</td>
<td>Metastore database name for Iceberg Catalog. Set this as an iceberg database alias if using a centralized Catalog.</td>
</tr>
<tr>
<td><h5>metadata.iceberg.glue.skip-archive</h5></td>
<td style="word-wrap: break-word;">false</td>
<td>Boolean</td>
<td>Whether to skip archiving the previous table version when committing a new metadata version to an AWS Glue catalog.</td>
</tr>
<tr>
<td><h5>metadata.iceberg.hadoop-conf-dir</h5></td>
<td style="word-wrap: break-word;">(none)</td>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,12 @@ public class IcebergOptions {
"Metastore table name for Iceberg Catalog."
+ "Set this as an iceberg table alias if using a centralized Catalog.");

/**
 * Whether to skip archiving the previous table version when committing Iceberg metadata
 * through an AWS Glue catalog. Forwarded to the metastore commit as the
 * {@code skipAWSGlueArchive} environment-context property; defaults to {@code false}
 * (Glue keeps an archived copy of each replaced table version).
 */
public static final ConfigOption<Boolean> GLUE_SKIP_ARCHIVE =
        key("metadata.iceberg.glue.skip-archive")
                .booleanType()
                .defaultValue(false)
                .withDescription("Skip archive for AWS Glue catalog.");

/** Where to store Iceberg metadata. */
public enum StorageType implements DescribedEnum {
DISABLED("disabled", "Disable Iceberg compatibility support."),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,11 @@
import org.apache.paimon.utils.Preconditions;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
Expand Down Expand Up @@ -145,9 +147,20 @@ private void commitMetadataImpl(Path newMetadataPath, @Nullable Path baseMetadat
.put("previous_metadata_location", baseMetadataPath.toString());
}

Options options = new Options(table.options());
boolean skipAWSGlueArchive = options.get(IcebergOptions.GLUE_SKIP_ARCHIVE);
EnvironmentContext environmentContext = new EnvironmentContext();
environmentContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);
environmentContext.putToProperties(
"skipAWSGlueArchive", Boolean.toString(skipAWSGlueArchive));

clients.execute(
client ->
client.alter_table(icebergHiveDatabase, icebergHiveTable, hiveTable, true));
client.alter_table_with_environmentContext(
icebergHiveDatabase,
icebergHiveTable,
hiveTable,
environmentContext));
}

private boolean databaseExists(String databaseName) throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,8 @@ public void testAppendOnlyTable() throws Exception {
tEnv.executeSql("CREATE DATABASE my_paimon.test_db");
tEnv.executeSql(
"CREATE TABLE my_paimon.test_db.t ( pt INT, id INT, data STRING ) PARTITIONED BY (pt) WITH "
+ "( 'metadata.iceberg.storage' = 'hive-catalog', 'metadata.iceberg.uri' = '', 'file.format' = 'avro' )");
+ "( 'metadata.iceberg.storage' = 'hive-catalog', 'metadata.iceberg.uri' = '', 'file.format' = 'avro',"
+ " 'metadata.iceberg.glue.skip-archive' = 'true' )");
tEnv.executeSql(
"INSERT INTO my_paimon.test_db.t VALUES "
+ "(1, 1, 'apple'), (1, 2, 'pear'), (2, 1, 'cat'), (2, 2, 'dog')")
Expand Down
Loading