From abd2aed6a6114e4c2cb94e1bf7b6ab3133e98b55 Mon Sep 17 00:00:00 2001
From: Radek Stankiewicz
Date: Mon, 6 Oct 2025 14:47:28 +0200
Subject: [PATCH] Input is reshuffled, so we can't guarantee that the same
 input row will get a specific job id, which the assertion relies on.

---
 sdks/python/apache_beam/io/gcp/bigquery_file_loads_test.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sdks/python/apache_beam/io/gcp/bigquery_file_loads_test.py b/sdks/python/apache_beam/io/gcp/bigquery_file_loads_test.py
index c318b1988536..5ae93cd4f5aa 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery_file_loads_test.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery_file_loads_test.py
@@ -891,7 +891,7 @@ def dynamic_destination_resolver(element, *side_inputs):
         Mock(jobReference=bigquery_api.JobReference(jobId=f'job_name{i}'))
         # Order matters in a sense to prove that jobs with different ids
         # (`2` & `3`) are run with `WRITE_APPEND` without this current fix.
-        for i in [1, 2, 1, 3, 1]
+        for i in [1, 1, 1, 1, 1]
     ]
     mock_perform_start_job.side_effect = mock_jobs

@@ -955,7 +955,7 @@ def dynamic_destination_resolver(element, *side_inputs):
             TableReference(
                 datasetId='dataset1',
                 projectId='project1',
-                tableId='job_name2'),
+                tableId='job_name1'),
             TableReference(
                 datasetId='dataset1',
                 projectId='project1',
@@ -984,7 +984,7 @@ def dynamic_destination_resolver(element, *side_inputs):
             TableReference(
                 datasetId='dataset3',
                 projectId='project1',
-                tableId='job_name3'),
+                tableId='job_name1'),
             TableReference(
                 datasetId='dataset3',
                 projectId='project1',
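
Note (not part of the patch): a minimal, self-contained sketch of why the assertion had to stop depending on which job id a given row receives. The `rows` list, the `run_once` helper, and the use of `random.sample` as a stand-in for the reordering a Reshuffle may introduce are illustrative assumptions, not Beam internals; the point is only that ids handed out by call order can pair with any row once ordering is not guaranteed, while a single repeated id keeps the expectation order-independent.

import random
from unittest import mock

rows = ['row_a', 'row_b', 'row_c', 'row_d', 'row_e']

def run_once(job_ids):
    # Hand out job ids purely by call order, like a Mock side_effect list.
    start_job = mock.Mock(side_effect=[f'job_name{i}' for i in job_ids])
    # Stand-in for the reordering a Reshuffle is free to introduce.
    shuffled = random.sample(rows, k=len(rows))
    return {row: start_job() for row in shuffled}

# With distinct ids ([1, 2, 1, 3, 1]) the row -> job id mapping can differ
# between runs, so asserting that a particular row got 'job_name2' is flaky:
print(run_once([1, 2, 1, 3, 1]))
print(run_once([1, 2, 1, 3, 1]))
# With a single repeated id ([1, 1, 1, 1, 1]) the mapping is the same no
# matter how the rows were reordered:
print(run_once([1, 1, 1, 1, 1]))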