Merged
26 commits
3f4ec33
[cdc] Update flink.cdc.version to 3.5.0.
lvyanquan Sep 28, 2025
084e709
[cdc] Add PaimonMetadataApplier.
lvyanquan Sep 29, 2025
a1b714d
[cdc] Add TypeConverter and DataConverter.
lvyanquan Sep 30, 2025
57d93ae
[cdc] Add TypeConverter and DataConverter.
lvyanquan Sep 30, 2025
737f184
[cdc] Add TypeConverter and DataConverter.
lvyanquan Sep 30, 2025
d15102f
[cdc] Add TypeConverter and DataConverter.
lvyanquan Sep 30, 2025
6ae954c
Fix unstable CI
yuxiqian Oct 28, 2025
6751a33
Polish error message
yuxiqian Nov 5, 2025
94f158e
Fix fieldGetters' output
yuxiqian Nov 5, 2025
64bee70
Optimize DataConverterTest & TypeConverterTest
yuxiqian Nov 5, 2025
4e9e7db
Fix Typo
yuxiqian Nov 5, 2025
a7606ae
Revert log configuration changes
yuxiqian Nov 5, 2025
05a0c58
Add partition keys into PK suite, too
yuxiqian Nov 5, 2025
7465922
Rename PaimonMetadataApplierTest case to avoid confusion
yuxiqian Nov 5, 2025
0983938
Set Future get timeout to avoid infinite waiting
yuxiqian Nov 5, 2025
4349eb8
Remove unused methods
yuxiqian Nov 5, 2025
714d80a
Reap jobs properly
yuxiqian Nov 5, 2025
2c944cc
Revert incorrect fixes
yuxiqian Nov 5, 2025
4353d1f
Avoid eagerly clearing connection pools
yuxiqian Nov 5, 2025
c605211
Fix Postgres connection pool clearing issue, too
yuxiqian Nov 5, 2025
cd5d53a
Dynamically load FlinkCatalogFactory
yuxiqian Nov 6, 2025
b9f7f1d
Update JavaDocs
yuxiqian Nov 6, 2025
a117271
Remove redundant arguments
yuxiqian Nov 6, 2025
461d3e6
Modify row type
yuxiqian Nov 6, 2025
2fa9940
Tweak DataConvertTest
yuxiqian Nov 6, 2025
6a2f8c6
Address comments
yuxiqian Nov 10, 2025
4 changes: 2 additions & 2 deletions paimon-flink/paimon-flink-cdc/pom.xml
@@ -35,8 +35,8 @@ under the License.

     <properties>
         <flink.version>1.20.1</flink.version>
-        <flink.cdc.version>3.1.1</flink.cdc.version>
-        <flink.mongodb.cdc.version>3.1.1</flink.mongodb.cdc.version>
+        <flink.cdc.version>3.5.0</flink.cdc.version>
+        <flink.mongodb.cdc.version>3.5.0</flink.mongodb.cdc.version>
         <avro.version>1.11.4</avro.version>
         <geometry.version>2.2.0</geometry.version>
         <json-path.version>2.9.0</json-path.version>
@@ -0,0 +1,95 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.cdc.connectors.base.relational.connection;

import com.zaxxer.hikari.HikariDataSource;
import org.apache.flink.cdc.common.annotation.VisibleForTesting;
import org.apache.flink.cdc.connectors.base.config.JdbcSourceConfig;
import org.apache.flink.util.FlinkRuntimeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
* Copied from <a
* href="https://github.com/apache/flink-cdc/blob/release-3.5.0/flink-cdc-connect/flink-cdc-source-connectors/flink-cdc-base/src/main/java/org/apache/flink/cdc/connectors/base/relational/connection/JdbcConnectionPools.java">Flink
* CDC 3.5.0 resemblance</a>. Modified method {@link JdbcConnectionPools#clear()} at line 92 ~ 94.
*/
public class JdbcConnectionPools implements ConnectionPools<HikariDataSource, JdbcSourceConfig> {

    private static final Logger LOG = LoggerFactory.getLogger(JdbcConnectionPools.class);

    private static JdbcConnectionPools instance;
    private final Map<ConnectionPoolId, HikariDataSource> pools = new HashMap<>();
    private static final Map<String, JdbcConnectionPoolFactory> POOL_FACTORY_MAP = new HashMap<>();

    private JdbcConnectionPools() {}

    public static synchronized JdbcConnectionPools getInstance(
            JdbcConnectionPoolFactory jdbcConnectionPoolFactory) {
        if (instance == null) {
            instance = new JdbcConnectionPools();
        }
        POOL_FACTORY_MAP.put(
                jdbcConnectionPoolFactory.getClass().getName(), jdbcConnectionPoolFactory);
        return instance;
    }

    @Override
    public HikariDataSource getOrCreateConnectionPool(
            ConnectionPoolId poolId, JdbcSourceConfig sourceConfig) {
        synchronized (pools) {
            if (!pools.containsKey(poolId)) {
                LOG.info("Create and register connection pool {}", poolId);
                JdbcConnectionPoolFactory jdbcConnectionPoolFactory =
                        POOL_FACTORY_MAP.get(poolId.getDataSourcePoolFactoryIdentifier());
                if (jdbcConnectionPoolFactory == null) {
                    throw new FlinkRuntimeException(
                            String.format(
                                    "Pool factory identifier is required for connection pool, but unknown pool factory identifier %s found.",
                                    poolId.getDataSourcePoolFactoryIdentifier()));
                }
                pools.put(poolId, jdbcConnectionPoolFactory.createPooledDataSource(sourceConfig));
            }
            return pools.get(poolId);
        }
    }

    /** This method is only intended for tests. */
    @VisibleForTesting
    public String getJdbcUrl(
            JdbcSourceConfig sourceConfig, String dataSourcePoolFactoryIdentifier) {
        JdbcConnectionPoolFactory jdbcConnectionPoolFactory =
                POOL_FACTORY_MAP.get(dataSourcePoolFactoryIdentifier);
        if (jdbcConnectionPoolFactory == null) {
            throw new FlinkRuntimeException(
                    String.format(
                            "Pool factory identifier is required for connection pools, but unknown pool factory identifier %s found.",
                            dataSourcePoolFactoryIdentifier));
        }
        return jdbcConnectionPoolFactory.getJdbcUrl(sourceConfig);
    }

    public void clear() throws IOException {
        // See org.apache.flink.cdc.connectors.mysql.source.connection.JdbcConnectionPools#clear.
    }
}
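
As a rough illustration (not part of the changed files), a hypothetical caller of the copied base-connector class above could look like the sketch below; myPoolFactory, myPoolId, and mySourceConfig are placeholders that a concrete connector would normally supply.

```java
// Hypothetical usage sketch; not part of this pull request.
import com.zaxxer.hikari.HikariDataSource;
import org.apache.flink.cdc.connectors.base.config.JdbcSourceConfig;
import org.apache.flink.cdc.connectors.base.relational.connection.ConnectionPoolId;
import org.apache.flink.cdc.connectors.base.relational.connection.JdbcConnectionPoolFactory;
import org.apache.flink.cdc.connectors.base.relational.connection.JdbcConnectionPools;

import java.io.IOException;

public class ConnectionPoolUsageSketch {

    HikariDataSource obtainPool(
            JdbcConnectionPoolFactory myPoolFactory,
            ConnectionPoolId myPoolId,
            JdbcSourceConfig mySourceConfig)
            throws IOException {
        // Registering the factory is a precondition: getOrCreateConnectionPool resolves the
        // factory via myPoolId.getDataSourcePoolFactoryIdentifier() and throws a
        // FlinkRuntimeException when no factory with that identifier has been registered.
        JdbcConnectionPools pools = JdbcConnectionPools.getInstance(myPoolFactory);

        // Returns the cached HikariDataSource for this pool id, creating it on first access.
        HikariDataSource dataSource = pools.getOrCreateConnectionPool(myPoolId, mySourceConfig);

        // With this change, clear() is a deliberate no-op, so the cached pool stays open and
        // can be reused by another job running in the same JVM.
        pools.clear();
        return dataSource;
    }
}
```

The sketch mirrors the contract visible above: the factory has to be registered through getInstance(...) before getOrCreateConnectionPool(...) can resolve its pool factory identifier, otherwise the FlinkRuntimeException shown in the class is thrown.
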
@@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.cdc.connectors.mysql.source.connection;

import com.zaxxer.hikari.HikariDataSource;
import org.apache.flink.cdc.connectors.mysql.source.config.MySqlSourceConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
* Copied from <a
* href="https://github.com/apache/flink-cdc/blob/release-3.5.0/flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc/src/main/java/org/apache/flink/cdc/connectors/mysql/source/connection/JdbcConnectionPools.java">Flink
* CDC 3.5.0 resemblance</a>. Modified method {@link JdbcConnectionPools#clear()} at line 60 ~ 66.
*/
public class JdbcConnectionPools implements ConnectionPools {

    private static final Logger LOG = LoggerFactory.getLogger(JdbcConnectionPools.class);

    private static final JdbcConnectionPools INSTANCE = new JdbcConnectionPools();
    private final Map<ConnectionPoolId, HikariDataSource> pools = new HashMap<>();

    private JdbcConnectionPools() {}

    public static JdbcConnectionPools getInstance() {
        return INSTANCE;
    }

    @Override
    public HikariDataSource getOrCreateConnectionPool(
            ConnectionPoolId poolId, MySqlSourceConfig sourceConfig) {
        synchronized (pools) {
            if (!pools.containsKey(poolId)) {
                LOG.info("Create and register connection pool {}", poolId);
                pools.put(poolId, PooledDataSourceFactory.createPooledDataSource(sourceConfig));
            }
            return pools.get(poolId);
        }
    }

    public void clear() throws IOException {
        // Intentionally no-op.
        //
        // Flink CDC 3.2+ automatically clears connection pools to avoid connection leakage.
        // However, that could accidentally affect two Paimon Action jobs running in one single
        // mini cluster. We copy and modify this class to keep the same behavior as in CDC 3.1.1.
    }
}
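
As a rough illustration of the scenario described in the comment above (again, not part of the changed files): two jobs sharing one mini cluster can reuse the cached MySQL pool because clear() no longer drops it. poolId and sourceConfig below are placeholders that the MySQL CDC source would normally build.

```java
// Hypothetical sketch; not part of this pull request.
import com.zaxxer.hikari.HikariDataSource;
import org.apache.flink.cdc.connectors.mysql.source.config.MySqlSourceConfig;
import org.apache.flink.cdc.connectors.mysql.source.connection.ConnectionPoolId;
import org.apache.flink.cdc.connectors.mysql.source.connection.JdbcConnectionPools;

import java.io.IOException;

public class SharedPoolSketch {

    void reuseAcrossJobs(ConnectionPoolId poolId, MySqlSourceConfig sourceConfig)
            throws IOException {
        // First job creates and caches the pool.
        HikariDataSource first =
                JdbcConnectionPools.getInstance().getOrCreateConnectionPool(poolId, sourceConfig);

        // Upstream Flink CDC 3.2+ clears the connection pools during teardown; with the
        // copied no-op clear() above, the cached pool is left untouched...
        JdbcConnectionPools.getInstance().clear();

        // ...so a second job in the same mini cluster gets the same, still-open data source.
        HikariDataSource second =
                JdbcConnectionPools.getInstance().getOrCreateConnectionPool(poolId, sourceConfig);
        assert first == second;
    }
}
```
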
@@ -186,7 +186,7 @@ public static JdbcIncrementalSource<CdcSourceRecord> buildPostgresSource(
         customConverterConfigs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");
         CdcDebeziumDeserializationSchema schema =
                 new CdcDebeziumDeserializationSchema(true, customConverterConfigs);
-        return sourceBuilder.deserializer(schema).includeSchemaChanges(true).build();
+        return sourceBuilder.deserializer(schema).build();
     }
 
     public static void registerJdbcDriver() {