From e8bfc662bc0d9a091606571afd72f81aa8ef6d96 Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 10:57:44 +0100
Subject: [PATCH 1/7] Migrate amazon provider transfer tests from `unittest` to `pytest`

---
 .../aws/transfers/test_dynamodb_to_s3.py      |  3 +-
 .../amazon/aws/transfers/test_ftp_to_s3.py    |  3 +-
 .../aws/transfers/test_google_api_to_s3.py    |  5 +-
 .../aws/transfers/test_hive_to_dynamodb.py    |  5 +-
 .../transfers/test_imap_attachment_to_s3.py   | 11 ++---
 .../amazon/aws/transfers/test_local_to_s3.py  | 26 +++++-----
 .../amazon/aws/transfers/test_mongo_to_s3.py  |  5 +-
 .../aws/transfers/test_redshift_to_s3.py      | 49 +++++++------------
 .../amazon/aws/transfers/test_s3_to_ftp.py    |  3 +-
 .../aws/transfers/test_s3_to_redshift.py      |  3 +-
 .../amazon/aws/transfers/test_s3_to_sftp.py   | 18 +++----
 .../aws/transfers/test_salesforce_to_s3.py    |  3 +-
 .../amazon/aws/transfers/test_sftp_to_s3.py   | 23 +++------
 .../amazon/aws/transfers/test_sql_to_s3.py    |  8 +--
 14 files changed, 63 insertions(+), 102 deletions(-)

diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
index 2aea42754b078..1c32089327eea 100644
--- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
@@ -18,13 +18,12 @@
 from __future__ import annotations

 import json
-import unittest
 from unittest.mock import MagicMock, patch

 from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator


-class DynamodbToS3Test(unittest.TestCase):
+class DynamodbToS3Test:
     def setUp(self):
         self.output_queue = []

diff --git a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
index 9bd05cbb5c377..3c0ead8cfed77 100644
--- a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from unittest import mock

 from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator
@@ -32,7 +31,7 @@
 FTP_PATH_MULTIPLE = "/tmp/"


-class TestFTPToS3Operator(unittest.TestCase):
+class TestFTPToS3Operator:
     def assert_execute(
         self, mock_local_tmp_file, mock_s3_hook_load_file, mock_ftp_hook_retrieve_file, ftp_file, s3_file
     ):
diff --git a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
index 1fac172f6ea33..6ddc968d3bd52 100644
--- a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from unittest.mock import Mock, patch

 import pytest
@@ -29,8 +28,8 @@
 from airflow.utils import db


-class TestGoogleApiToS3(unittest.TestCase):
-    def setUp(self):
+class TestGoogleApiToS3:
+    def setup_method(self):
         conf.load_test_config()

         db.merge_conn(
diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
index c1a2b766763be..07676540e4ab0 100644
--- a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
+++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
@@ -19,7 +19,6 @@

 import datetime
 import json
-import unittest
 from unittest import mock

 import pandas as pd
@@ -34,8 +33,8 @@
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]


-class TestHiveToDynamoDBOperator(unittest.TestCase):
-    def setUp(self):
+class TestHiveToDynamoDBOperator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}
         dag = DAG("test_dag_id", default_args=args)
         self.dag = dag
diff --git a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
index 1663f90eac462..acb5bb713461b 100644
--- a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
@@ -17,14 +17,13 @@
 # under the License.
 from __future__ import annotations

-import unittest
-from unittest.mock import patch
+from unittest import mock

 from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator


-class TestImapAttachmentToS3Operator(unittest.TestCase):
-    def setUp(self):
+class TestImapAttachmentToS3Operator:
+    def setup_method(self):
         self.kwargs = dict(
             imap_attachment_name="test_file",
             s3_bucket="test_bucket",
@@ -37,8 +36,8 @@
             dag=None,
         )

-    @patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook")
-    @patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook")
+    @mock.patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook")
+    @mock.patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook")
     def test_execute(self, mock_imap_hook, mock_s3_hook):
         mock_imap_hook.return_value.__enter__ = mock_imap_hook
         mock_imap_hook.return_value.retrieve_mail_attachments.return_value = [("test_file", b"Hello World")]
diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
index 2d53d19256c93..8c39ca4238bb6 100644
--- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
@@ -19,7 +19,6 @@

 import datetime
 import os
-import unittest

 import boto3
 from moto import mock_s3
@@ -27,12 +26,11 @@
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator

+CONFIG = {"verify": False, "replace": False, "encrypt": False, "gzip": False}

-class TestFileToS3Operator(unittest.TestCase):
+class TestFileToS3Operator:

-    _config = {"verify": False, "replace": False, "encrypt": False, "gzip": False}
-
-    def setUp(self):
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)}
         self.dag = DAG("test_dag_id", default_args=args)
         self.dest_key = "test/test1.csv"
@@ -41,7 +39,7 @@
         with open(self.testfile1, "wb") as f:
             f.write(b"x" * 393216)

-    def tearDown(self):
+    def teardown_method(self):
         os.remove(self.testfile1)

     def test_init(self):
@@ -51,15 +49,15 @@
             filename=self.testfile1,
             dest_key=self.dest_key,
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
         assert operator.filename == self.testfile1
         assert operator.dest_key == self.dest_key
         assert operator.dest_bucket == self.dest_bucket
-        assert operator.verify == self._config["verify"]
-        assert operator.replace == self._config["replace"]
-        assert operator.encrypt == self._config["encrypt"]
-        assert operator.gzip == self._config["gzip"]
+        assert operator.verify == CONFIG["verify"]
+        assert operator.replace == CONFIG["replace"]
+        assert operator.encrypt == CONFIG["encrypt"]
+        assert operator.gzip == CONFIG["gzip"]

     def test_execute_exception(self):
         operator = LocalFilesystemToS3Operator(
@@ -68,7 +66,7 @@
             filename=self.testfile1,
             dest_key=f"s3://dummy/{self.dest_key}",
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
         with self.assertRaises(TypeError):
             operator.execute(None)
@@ -83,7 +81,7 @@
             filename=self.testfile1,
             dest_key=self.dest_key,
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
         operator.execute(None)

@@ -102,7 +100,7 @@
             dag=self.dag,
             filename=self.testfile1,
             dest_key=f"s3://dummy/{self.dest_key}",
-            **self._config,
+            **CONFIG,
         )
         operator.execute(None)

diff --git a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
index 9fb06866fb47c..f2bd53318c3e8 100644
--- a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from unittest import mock

 from airflow.models import DAG, DagRun, TaskInstance
@@ -40,8 +39,8 @@
 ]


-class TestMongoToS3Operator(unittest.TestCase):
-    def setUp(self):
+class TestMongoToS3Operator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}

         self.dag = DAG("test_dag_id", default_args=args)
diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
index a94a3fce6d19d..91a1ae32da362 100644
--- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
@@ -17,11 +17,10 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from unittest import mock

+import pytest
 from boto3.session import Session
-from parameterized import parameterized

 from airflow.models.connection import Connection
 from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
@@ -29,25 +28,20 @@
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces


-class TestRedshiftToS3Transfer(unittest.TestCase):
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+class TestRedshiftToS3Transfer:
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_table_unloading(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -94,24 +88,19 @@
         assert secret_key in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], unload_query)

-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_execute_sts_token(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "ASIA_aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -160,13 +149,14 @@
         assert token in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], unload_query)

-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "table, table_as_file_name, expected_s3_key",
         [
             ["table", True, "key/table_"],
             ["table", False, "key"],
             [None, False, "key"],
             [None, True, "key"],
-        ]
+        ],
     )
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_custom_select_query_unloading(
         self,
-        table,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -225,24 +215,19 @@
         assert secret_key in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], unload_query)

-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_table_unloading_role_arn(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
index 3bb28b692a26e..6308d34ac020a 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from unittest import mock

 from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator
@@ -30,7 +29,7 @@
 FTP_CONN_ID = "ftp_default"


-class TestS3ToFTPOperator(unittest.TestCase):
+class TestS3ToFTPOperator:
     @mock.patch("airflow.providers.ftp.hooks.ftp.FTPHook.store_file")
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_key")
     @mock.patch("airflow.providers.amazon.aws.transfers.s3_to_ftp.NamedTemporaryFile")
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
index 56c72a0354af3..e69673b27e36d 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from unittest import mock

 import pytest
@@ -29,7 +28,7 @@
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces


-class TestS3ToRedshiftTransfer(unittest.TestCase):
+class TestS3ToRedshiftTransfer:
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index cef89e396740d..879206d896650 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -17,13 +17,13 @@
 # under the License.
 from __future__ import annotations

-import unittest
-
 import boto3
 from moto import mock_s3

 from airflow.models import DAG
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator
+from airflow.providers.ssh.hooks.ssh import SSHHook
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils.timezone import datetime
 from tests.test_utils.config import conf_vars
@@ -42,14 +42,10 @@
 DEFAULT_DATE = datetime(2018, 1, 1)


-class TestS3ToSFTPOperator(unittest.TestCase):
-    @mock_s3
-    def setUp(self):
-        from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-        from airflow.providers.ssh.hooks.ssh import SSHHook
+class TestS3ToSFTPOperator:
+    def setup_method(self):

         hook = SSHHook(ssh_conn_id="ssh_default")
-        s3_hook = S3Hook("aws_default")
         hook.no_host_key_check = True
         dag = DAG(
             f"{TEST_DAG_ID}test_schedule_dag_once",
@@ -58,7 +54,6 @@
         )

         self.hook = hook
-        self.s3_hook = s3_hook

         self.ssh_client = self.hook.get_conn()
         self.sftp_client = self.ssh_client.open_sftp()
@@ -84,7 +79,8 @@
         with open(LOCAL_FILE_PATH, "w") as file:
             file.write(test_remote_file_content)

-        self.s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)
+        s3_hook = S3Hook("aws_default")
+        s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)

         # Check if object was created in s3
         objects_in_dest_bucket = conn.list_objects(Bucket=self.s3_bucket, Prefix=self.s3_key)
@@ -136,5 +132,5 @@
         assert remove_file_task is not None
         remove_file_task.execute(None)

-    def tearDown(self):
+    def teardown_method(self):
         self.delete_remote_resource()
diff --git a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
index b861a2cdc82f2..5dcc1890c244b 100644
--- a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from collections import OrderedDict
 from unittest import mock

@@ -55,7 +54,7 @@
 ACL_POLICY = None


-class TestSalesforceToGcsOperator(unittest.TestCase):
+class TestSalesforceToGcsOperator:
     @mock.patch.object(S3Hook, "load_file")
     @mock.patch.object(SalesforceHook, "write_object_to_file")
     @mock.patch.object(SalesforceHook, "make_query")
diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index 4a23d54d63fe3..f46b9b0ea303a 100644
--- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -17,11 +17,9 @@
 # under the License.
 from __future__ import annotations

-import unittest
-
 import boto3
+import pytest
 from moto import mock_s3
-from parameterized import parameterized

 from airflow.models import DAG
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -44,12 +42,10 @@
 DEFAULT_DATE = datetime(2018, 1, 1)


-class TestSFTPToS3Operator(unittest.TestCase):
-    @mock_s3
-    def setUp(self):
+class TestSFTPToS3Operator:
+    def setup_method(self):
         hook = SSHHook(ssh_conn_id="ssh_default")
-        s3_hook = S3Hook("aws_default")
         hook.no_host_key_check = True
         dag = DAG(
             f"{TEST_DAG_ID}test_schedule_dag_once",
@@ -58,7 +54,6 @@
         )

         self.hook = hook
-        self.s3_hook = s3_hook

         self.ssh_client = self.hook.get_conn()
         self.sftp_client = self.ssh_client.open_sftp()
@@ -68,15 +63,10 @@
         self.sftp_path = SFTP_PATH
         self.s3_key = S3_KEY

-    @parameterized.expand(
-        [
-            (True,),
-            (False,),
-        ]
-    )
+    @pytest.mark.parametrize("use_temp_file", [True, False])
     @mock_s3
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
-    def test_sftp_to_s3_operation(self, use_temp_file=True):
+    def test_sftp_to_s3_operation(self, use_temp_file):
         # Setting
         test_remote_file_content = (
             "This is remote file content \n which is also multiline "
@@ -95,9 +85,10 @@
             create_file_task.execute(None)

         # Test for creation of s3 bucket
+        s3_hook = S3Hook("aws_default")
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
-        assert self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert s3_hook.check_for_bucket(self.s3_bucket)

         # get remote file to local
         run_task = SFTPToS3Operator(
diff --git a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
index fd7f25a009fe3..fe4a9d512dda2 100644
--- a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations

-import unittest
 from tempfile import NamedTemporaryFile
 from unittest import mock

@@ -29,7 +28,7 @@
 from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator


-class TestSqlToS3Operator(unittest.TestCase):
+class TestSqlToS3Operator:
     @mock.patch("airflow.providers.amazon.aws.transfers.sql_to_s3.NamedTemporaryFile")
     @mock.patch("airflow.providers.amazon.aws.transfers.sql_to_s3.S3Hook")
     def test_execute_csv(self, mock_s3_hook, temp_mock):
@@ -146,11 +145,12 @@
             replace=True,
         )

-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "_, params",
         [
             ("with-csv", {"file_format": "csv", "null_string_result": None}),
             ("with-parquet", {"file_format": "parquet", "null_string_result": "None"}),
-        ]
+        ],
     )
     def test_fix_dtypes(self, _, params):
         op = SqlToS3Operator(

From 377138bd6a3aca89a14407741909b259d94d6346 Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 12:46:19 +0100
Subject: [PATCH 2/7] Fix: use pytest.raises and local S3Hook instances in
 transfer tests

---
 tests/providers/amazon/aws/transfers/test_local_to_s3.py | 3 ++-
 tests/providers/amazon/aws/transfers/test_s3_to_sftp.py  | 2 +-
 tests/providers/amazon/aws/transfers/test_sftp_to_s3.py  | 2 +-
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
index 8c39ca4238bb6..54272178e0341 100644
--- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
@@ -21,6 +21,7 @@
 import os

 import boto3
+import pytest
 from moto import mock_s3

 from airflow.models.dag import DAG
@@ -68,7 +69,7 @@
             dest_bucket=self.dest_bucket,
             **CONFIG,
         )
-        with self.assertRaises(TypeError):
+        with pytest.raises(TypeError):
             operator.execute(None)

     @mock_s3
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index 879206d896650..687b7b1b0966e 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -118,7 +118,7 @@
         # Clean up after finishing with test
         conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
         conn.delete_bucket(Bucket=self.s3_bucket)
-        assert not self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert not s3_hook.check_for_bucket(self.s3_bucket)

     def delete_remote_resource(self):
         # check the remote file content
diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index f46b9b0ea303a..a1b586ee68d9b 100644
--- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -116,4 +116,4 @@
         # Clean up after finishing with test
         conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
         conn.delete_bucket(Bucket=self.s3_bucket)
-        assert not self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert not s3_hook.check_for_bucket(self.s3_bucket)

From 886014be354ccd84c19f75b73bb41c9b26da2f0e Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 13:01:43 +0100
Subject: [PATCH 3/7] Fix: create S3Hook with aws_conn_id=None and use
 pytest.param ids in test_sql_to_s3

---
 tests/providers/amazon/aws/transfers/test_s3_to_sftp.py | 2 +-
 tests/providers/amazon/aws/transfers/test_sftp_to_s3.py | 2 +-
 tests/providers/amazon/aws/transfers/test_sql_to_s3.py  | 9 ++++-----
 3 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index 687b7b1b0966e..37b8b72ed8eb6 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -79,7 +79,7 @@
         with open(LOCAL_FILE_PATH, "w") as file:
             file.write(test_remote_file_content)

-        s3_hook = S3Hook("aws_default")
+        s3_hook = S3Hook(aws_conn_id=None)
         s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)

         # Check if object was created in s3
diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index a1b586ee68d9b..d3a301c823030 100644
--- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -85,7 +85,7 @@
             create_file_task.execute(None)

         # Test for creation of s3 bucket
-        s3_hook = S3Hook("aws_default")
+        s3_hook = S3Hook(aws_conn_id=None)
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
         assert s3_hook.check_for_bucket(self.s3_bucket)
diff --git a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
index fe4a9d512dda2..84db615eebccf 100644
--- a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
@@ -22,7 +22,6 @@

 import pandas as pd
 import pytest
-from parameterized import parameterized

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator
@@ -146,13 +145,13 @@
         )

     @pytest.mark.parametrize(
-        "_, params",
+        "params",
         [
-            ("with-csv", {"file_format": "csv", "null_string_result": None}),
-            ("with-parquet", {"file_format": "parquet", "null_string_result": "None"}),
+            pytest.param({"file_format": "csv", "null_string_result": None}, id="with-csv"),
+            pytest.param({"file_format": "parquet", "null_string_result": "None"}, id="with-parquet"),
         ],
     )
-    def test_fix_dtypes(self, _, params):
+    def test_fix_dtypes(self, params):
         op = SqlToS3Operator(
             query="query",
             s3_bucket="s3_bucket",

From 558bb0bd7120d26c81c010c81951b0e0fa2c0d2b Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 13:25:05 +0100
Subject: [PATCH 4/7] Fix: instantiate S3Hook before its first use in
 test_s3_to_sftp

---
 tests/providers/amazon/aws/transfers/test_s3_to_sftp.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index 37b8b72ed8eb6..74b5273f652e1 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -66,6 +66,7 @@
     @mock_s3
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
     def test_s3_to_sftp_operation(self):
+        s3_hook = S3Hook(aws_conn_id=None)
         # Setting
         test_remote_file_content = (
             "This is remote file content \n which is also multiline "
@@ -75,11 +76,10 @@
         # Test for creation of s3 bucket
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
-        assert self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert s3_hook.check_for_bucket(self.s3_bucket)

         with open(LOCAL_FILE_PATH, "w") as file:
             file.write(test_remote_file_content)

-        s3_hook = S3Hook(aws_conn_id=None)
         s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)

         # Check if object was created in s3

From ea9caeadbe36dbfe7454ba33d887526ea10a9327 Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 13:52:13 +0100
Subject: [PATCH 5/7] Fix black formatting in test_local_to_s3

---
 tests/providers/amazon/aws/transfers/test_local_to_s3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
index 54272178e0341..ad811a0a0c886 100644
--- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
@@ -29,8 +29,8 @@
 CONFIG = {"verify": False, "replace": False, "encrypt": False, "gzip": False}

-class TestFileToS3Operator:

+class TestFileToS3Operator:
     def setup_method(self):
         args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)}
         self.dag = DAG("test_dag_id", default_args=args)
         self.dest_key = "test/test1.csv"

From e23bff7ee28ca5969c6567e93c93f0c7d7f6332f Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 22:39:58 +0100
Subject: [PATCH 6/7] Fix: rename setup to setup_method in DynamodbToS3Test

---
 tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
index b220812fdab18..6dcac793a4cbd 100644
--- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
@@ -36,7 +36,7 @@ def test_jsonencoder_with_decimal(self):


 class DynamodbToS3Test:
-    def setup(self):
+    def setup_method(self):
         self.output_queue = []

     def mock_upload_file(self, Filename, Bucket, Key):

From 9960d4325c4f98e807b4019625a566cfb26db18c Mon Sep 17 00:00:00 2001
From: IAL32
Date: Tue, 6 Dec 2022 22:44:03 +0100
Subject: [PATCH 7/7] Fix: make BatchSensor poke assertion explicit in
 test_batch

---
 tests/providers/amazon/aws/sensors/test_batch.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/providers/amazon/aws/sensors/test_batch.py b/tests/providers/amazon/aws/sensors/test_batch.py
index d7905d563ff6a..835b99ad0a5c2 100644
--- a/tests/providers/amazon/aws/sensors/test_batch.py
+++ b/tests/providers/amazon/aws/sensors/test_batch.py
@@ -42,7 +42,7 @@ def setup_method(self):
     @mock.patch.object(BatchClientHook, "get_job_description")
     def test_poke_on_success_state(self, mock_get_job_description):
         mock_get_job_description.return_value = {"status": "SUCCEEDED"}
-        assert self.batch_sensor.poke({})
+        assert self.batch_sensor.poke({}) is True
         mock_get_job_description.assert_called_once_with(JOB_ID)

     @mock.patch.object(BatchClientHook, "get_job_description")