From 1348f187f77e53a845234d84a681069ede2ff7d2 Mon Sep 17 00:00:00 2001
From: pingchunzhang
Date: Sat, 8 Feb 2025 18:02:22 +0800
Subject: [PATCH] [improvement](test) improvement case compatible with multi
 platform(like azure)

---
 .../auth_call/test_dml_broker_load_auth.groovy        |  3 ++-
 ...est_domain_connection_and_ak_sk_correction.groovy  | 12 ++++++++----
 .../suites/load_p0/broker_load/test_seq_load.groovy   |  3 ++-
 .../stream_load/test_stream_load_error_url.groovy     |  2 +-
 4 files changed, 13 insertions(+), 7 deletions(-)

diff --git a/regression-test/suites/auth_call/test_dml_broker_load_auth.groovy b/regression-test/suites/auth_call/test_dml_broker_load_auth.groovy
index 0ea44241bfae2f..4eb9202f6ee8af 100644
--- a/regression-test/suites/auth_call/test_dml_broker_load_auth.groovy
+++ b/regression-test/suites/auth_call/test_dml_broker_load_auth.groovy
@@ -118,7 +118,8 @@ suite("test_dml_broker_load_auth","p0,auth_call") {
             "AWS_SECRET_KEY" = "$sk",
             "AWS_ENDPOINT" = "$endpoint",
             "AWS_REGION" = "$region",
-            "compress_type" = "GZ"
+            "compress_type" = "GZ",
+            "provider" = "${getS3Provider()}"
         )
         properties(
             "timeout" = "28800",
diff --git a/regression-test/suites/load_p0/broker_load/test_domain_connection_and_ak_sk_correction.groovy b/regression-test/suites/load_p0/broker_load/test_domain_connection_and_ak_sk_correction.groovy
index a394caacf04098..36073df4b07a2c 100644
--- a/regression-test/suites/load_p0/broker_load/test_domain_connection_and_ak_sk_correction.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_domain_connection_and_ak_sk_correction.groovy
@@ -73,7 +73,8 @@ suite("test_domain_connection_and_ak_sk_correction", "load_p0") {
         "AWS_ENDPOINT" = "${getS3Endpoint()}",
         "AWS_ACCESS_KEY" = "${getS3AK()}",
         "AWS_SECRET_KEY" = "${getS3SK()}",
-        "AWS_REGION" = "${getS3Region()}"
+        "AWS_REGION" = "${getS3Region()}",
+        "PROVIDER" = "${getS3Provider()}"
     );
     """
     logger.info("the first sql result is {}", result)
@@ -93,7 +94,8 @@ suite("test_domain_connection_and_ak_sk_correction", "load_p0") {
         "AWS_ENDPOINT" = "${getS3Endpoint()}1",
         "AWS_ACCESS_KEY" = "${getS3AK()}",
         "AWS_SECRET_KEY" = "${getS3SK()}",
-        "AWS_REGION" = "${getS3Region()}"
+        "AWS_REGION" = "${getS3Region()}",
+        "PROVIDER" = "${getS3Provider()}"
     );
     """
     logger.info("the second sql result is {}", result)
@@ -118,7 +120,8 @@ suite("test_domain_connection_and_ak_sk_correction", "load_p0") {
         "AWS_ENDPOINT" = "${getS3Endpoint()}",
         "AWS_ACCESS_KEY" = "${getS3AK()}1",
         "AWS_SECRET_KEY" = "${getS3SK()}",
-        "AWS_REGION" = "${getS3Region()}"
+        "AWS_REGION" = "${getS3Region()}",
+        "PROVIDER" = "${getS3Provider()}"
     );
     """
     logger.info("the third sql result is {}", result)
@@ -147,7 +150,8 @@ suite("test_domain_connection_and_ak_sk_correction", "load_p0") {
         "AWS_ENDPOINT" = "${getS3Endpoint()}",
         "AWS_ACCESS_KEY" = "${getS3AK()}",
         "AWS_SECRET_KEY" = "${getS3SK()}",
-        "AWS_REGION" = "${getS3Region()}"
+        "AWS_REGION" = "${getS3Region()}",
+        "PROVIDER" = "${getS3Provider()}"
     );
     """
     logger.info("the fourth sql result is {}", result)
diff --git a/regression-test/suites/load_p0/broker_load/test_seq_load.groovy b/regression-test/suites/load_p0/broker_load/test_seq_load.groovy
index 9929482c878f6d..0d6a737d6a5a8a 100644
--- a/regression-test/suites/load_p0/broker_load/test_seq_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_seq_load.groovy
@@ -101,7 +101,8 @@ suite("test_seq_load", "load_p0") {
             "AWS_ACCESS_KEY" = "$ak",
             "AWS_SECRET_KEY" = "$sk",
             "AWS_ENDPOINT" = "${getS3Endpoint()}",
-            "AWS_REGION" = "${getS3Region()}"
+            "AWS_REGION" = "${getS3Region()}",
+            "PROVIDER" = "${getS3Provider()}"
         )
         """
         logger.info("submit sql: ${sql_str}");
diff --git a/regression-test/suites/load_p0/stream_load/test_stream_load_error_url.groovy b/regression-test/suites/load_p0/stream_load/test_stream_load_error_url.groovy
index e13db181e99581..93375a36410782 100644
--- a/regression-test/suites/load_p0/stream_load/test_stream_load_error_url.groovy
+++ b/regression-test/suites/load_p0/stream_load/test_stream_load_error_url.groovy
@@ -69,7 +69,7 @@ suite("test_stream_load_error_url", "p0") {
             assertTrue(out.contains("actual column number in csv file is more than schema column number.actual number"))
             log.info("url: " + json.ErrorURL)
             if (isCloudMode()) {
-                assertTrue(json.ErrorURL.contains("X-Amz-Signature="))
+                assertTrue(json.ErrorURL.contains("X-Amz-Signature=") || json.ErrorURL.contains("error_log"))
             }
         }
     }