diff --git a/tests/providers/amazon/aws/sensors/test_batch.py b/tests/providers/amazon/aws/sensors/test_batch.py
index d7905d563ff6a..835b99ad0a5c2 100644
--- a/tests/providers/amazon/aws/sensors/test_batch.py
+++ b/tests/providers/amazon/aws/sensors/test_batch.py
@@ -42,7 +42,7 @@ def setup_method(self):
     @mock.patch.object(BatchClientHook, "get_job_description")
     def test_poke_on_success_state(self, mock_get_job_description):
         mock_get_job_description.return_value = {"status": "SUCCEEDED"}
-        assert self.batch_sensor.poke({})
+        assert self.batch_sensor.poke({}) is True
         mock_get_job_description.assert_called_once_with(JOB_ID)
 
     @mock.patch.object(BatchClientHook, "get_job_description")
diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
index 2de653fb4a512..6dcac793a4cbd 100644
--- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
@@ -18,14 +18,13 @@
 from __future__ import annotations
 
 import json
-import unittest
 from decimal import Decimal
 from unittest.mock import MagicMock, patch
 
 from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator, JSONEncoder
 
 
-class JSONEncoderTest(unittest.TestCase):
+class JSONEncoderTest:
     def test_jsonencoder_with_decimal(self):
         """Test JSONEncoder correctly encodes and decodes decimal values."""
 
@@ -36,8 +35,8 @@ def test_jsonencoder_with_decimal(self):
         self.assertAlmostEqual(decoded, org)
 
 
-class DynamodbToS3Test(unittest.TestCase):
-    def setUp(self):
+class DynamodbToS3Test:
+    def setup_method(self):
         self.output_queue = []
 
     def mock_upload_file(self, Filename, Bucket, Key):
diff --git a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
index 9bd05cbb5c377..3c0ead8cfed77 100644
--- a/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_ftp_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.amazon.aws.transfers.ftp_to_s3 import FTPToS3Operator
@@ -32,7 +31,7 @@
 FTP_PATH_MULTIPLE = "/tmp/"
 
 
-class TestFTPToS3Operator(unittest.TestCase):
+class TestFTPToS3Operator:
     def assert_execute(
         self, mock_local_tmp_file, mock_s3_hook_load_file, mock_ftp_hook_retrieve_file, ftp_file, s3_file
     ):
diff --git a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
index 1fac172f6ea33..6ddc968d3bd52 100644
--- a/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest.mock import Mock, patch
 
 import pytest
@@ -29,8 +28,8 @@
 from airflow.utils import db
 
 
-class TestGoogleApiToS3(unittest.TestCase):
-    def setUp(self):
+class TestGoogleApiToS3:
+    def setup_method(self):
         conf.load_test_config()
 
         db.merge_conn(
diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
index c1a2b766763be..07676540e4ab0 100644
--- a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
+++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
@@ -19,7 +19,6 @@
 
 import datetime
 import json
-import unittest
 from unittest import mock
 
 import pandas as pd
@@ -34,8 +33,8 @@
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
 
 
-class TestHiveToDynamoDBOperator(unittest.TestCase):
-    def setUp(self):
+class TestHiveToDynamoDBOperator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}
         dag = DAG("test_dag_id", default_args=args)
         self.dag = dag
diff --git a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
index 1663f90eac462..acb5bb713461b 100644
--- a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
@@ -17,14 +17,13 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
-from unittest.mock import patch
+from unittest import mock
 
 from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator
 
 
-class TestImapAttachmentToS3Operator(unittest.TestCase):
-    def setUp(self):
+class TestImapAttachmentToS3Operator:
+    def setup_method(self):
         self.kwargs = dict(
             imap_attachment_name="test_file",
             s3_bucket="test_bucket",
@@ -37,8 +36,8 @@ def setUp(self):
             dag=None,
         )
 
-    @patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook")
-    @patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook")
+    @mock.patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook")
+    @mock.patch("airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook")
     def test_execute(self, mock_imap_hook, mock_s3_hook):
         mock_imap_hook.return_value.__enter__ = mock_imap_hook
         mock_imap_hook.return_value.retrieve_mail_attachments.return_value = [("test_file", b"Hello World")]
diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
index 2d53d19256c93..ad811a0a0c886 100644
--- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
@@ -19,20 +19,19 @@
 
 import datetime
 import os
-import unittest
 
 import boto3
+import pytest
 from moto import mock_s3
 
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.transfers.local_to_s3 import LocalFilesystemToS3Operator
 
+CONFIG = {"verify": False, "replace": False, "encrypt": False, "gzip": False}
 
-class TestFileToS3Operator(unittest.TestCase):
-
-    _config = {"verify": False, "replace": False, "encrypt": False, "gzip": False}
-
-    def setUp(self):
+
+class TestFileToS3Operator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)}
         self.dag = DAG("test_dag_id", default_args=args)
         self.dest_key = "test/test1.csv"
@@ -41,7 +40,7 @@ def setUp(self):
         with open(self.testfile1, "wb") as f:
             f.write(b"x" * 393216)
 
-    def tearDown(self):
+    def teardown_method(self):
         os.remove(self.testfile1)
 
     def test_init(self):
@@ -51,15 +50,15 @@ def test_init(self):
             filename=self.testfile1,
             dest_key=self.dest_key,
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
 
         assert operator.filename == self.testfile1
         assert operator.dest_key == self.dest_key
         assert operator.dest_bucket == self.dest_bucket
-        assert operator.verify == self._config["verify"]
-        assert operator.replace == self._config["replace"]
-        assert operator.encrypt == self._config["encrypt"]
-        assert operator.gzip == self._config["gzip"]
+        assert operator.verify == CONFIG["verify"]
+        assert operator.replace == CONFIG["replace"]
+        assert operator.encrypt == CONFIG["encrypt"]
+        assert operator.gzip == CONFIG["gzip"]
 
     def test_execute_exception(self):
@@ -68,9 +67,9 @@ def test_execute_exception(self):
             filename=self.testfile1,
             dest_key=f"s3://dummy/{self.dest_key}",
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
-        with self.assertRaises(TypeError):
+        with pytest.raises(TypeError):
             operator.execute(None)
 
     @mock_s3
@@ -83,7 +82,7 @@ def test_execute(self):
             filename=self.testfile1,
             dest_key=self.dest_key,
             dest_bucket=self.dest_bucket,
-            **self._config,
+            **CONFIG,
         )
 
         operator.execute(None)
@@ -102,7 +101,7 @@ def test_execute_with_only_key(self):
             dag=self.dag,
             filename=self.testfile1,
             dest_key=f"s3://dummy/{self.dest_key}",
-            **self._config,
+            **CONFIG,
         )
 
         operator.execute(None)
diff --git a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
index 9fb06866fb47c..f2bd53318c3e8 100644
--- a/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.models import DAG, DagRun, TaskInstance
@@ -40,8 +39,8 @@
 ]
 
 
-class TestMongoToS3Operator(unittest.TestCase):
-    def setUp(self):
+class TestMongoToS3Operator:
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": DEFAULT_DATE}
 
         self.dag = DAG("test_dag_id", default_args=args)
diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
index a94a3fce6d19d..91a1ae32da362 100644
--- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
@@ -17,11 +17,10 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
+import pytest
 from boto3.session import Session
-from parameterized import parameterized
 
 from airflow.models.connection import Connection
 from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
@@ -29,25 +28,20 @@
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 
-class TestRedshiftToS3Transfer(unittest.TestCase):
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+class TestRedshiftToS3Transfer:
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_table_unloading(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -94,24 +88,19 @@ def test_table_unloading(
         assert secret_key in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], unload_query)
 
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_execute_sts_token(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "ASIA_aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -160,13 +149,14 @@ def test_execute_sts_token(
         assert token in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], unload_query)
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "table, table_as_file_name, expected_s3_key",
         [
             ["table", True, "key/table_"],
             ["table", False, "key"],
             [None, False, "key"],
             [None, True, "key"],
-        ]
+        ],
     )
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
@@ -174,13 +164,13 @@ def test_execute_sts_token(
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_custom_select_query_unloading(
         self,
-        table,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
@@ -225,24 +215,19 @@ def test_custom_select_query_unloading(
         assert secret_key in unload_query
         assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], unload_query)
 
-    @parameterized.expand(
-        [
-            [True, "key/table_"],
-            [False, "key"],
-        ]
-    )
+    @pytest.mark.parametrize("table_as_file_name, expected_s3_key", [[True, "key/table_"], [False, "key"]])
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
     @mock.patch("airflow.providers.amazon.aws.hooks.redshift_sql.RedshiftSQLHook.run")
     def test_table_unloading_role_arn(
         self,
-        table_as_file_name,
-        expected_s3_key,
         mock_run,
         mock_session,
         mock_connection,
         mock_hook,
+        table_as_file_name,
+        expected_s3_key,
     ):
         access_key = "aws_access_key_id"
         secret_key = "aws_secret_access_key"
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
index 3bb28b692a26e..6308d34ac020a 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_ftp.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.amazon.aws.transfers.s3_to_ftp import S3ToFTPOperator
@@ -30,7 +29,7 @@
 FTP_CONN_ID = "ftp_default"
 
 
-class TestS3ToFTPOperator(unittest.TestCase):
+class TestS3ToFTPOperator:
     @mock.patch("airflow.providers.ftp.hooks.ftp.FTPHook.store_file")
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_key")
     @mock.patch("airflow.providers.amazon.aws.transfers.s3_to_ftp.NamedTemporaryFile")
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
index 56c72a0354af3..e69673b27e36d 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 import pytest
@@ -29,7 +28,7 @@
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 
-class TestS3ToRedshiftTransfer(unittest.TestCase):
+class TestS3ToRedshiftTransfer:
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.get_connection")
     @mock.patch("airflow.models.connection.Connection")
     @mock.patch("boto3.session.Session")
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index cef89e396740d..74b5273f652e1 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -17,13 +17,13 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
-
 import boto3
 from moto import mock_s3
 
 from airflow.models import DAG
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator
+from airflow.providers.ssh.hooks.ssh import SSHHook
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils.timezone import datetime
 from tests.test_utils.config import conf_vars
@@ -42,14 +42,10 @@
 DEFAULT_DATE = datetime(2018, 1, 1)
 
 
-class TestS3ToSFTPOperator(unittest.TestCase):
-    @mock_s3
-    def setUp(self):
-        from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-        from airflow.providers.ssh.hooks.ssh import SSHHook
-
+class TestS3ToSFTPOperator:
+    def setup_method(self):
         hook = SSHHook(ssh_conn_id="ssh_default")
-        s3_hook = S3Hook("aws_default")
         hook.no_host_key_check = True
         dag = DAG(
             f"{TEST_DAG_ID}test_schedule_dag_once",
@@ -58,7 +54,6 @@ def setUp(self):
         )
 
         self.hook = hook
-        self.s3_hook = s3_hook
 
         self.ssh_client = self.hook.get_conn()
         self.sftp_client = self.ssh_client.open_sftp()
@@ -71,6 +66,7 @@ def setUp(self):
     @mock_s3
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
     def test_s3_to_sftp_operation(self):
+        s3_hook = S3Hook(aws_conn_id=None)
         # Setting
         test_remote_file_content = (
             "This is remote file content \n which is also multiline "
@@ -80,11 +76,11 @@ def test_s3_to_sftp_operation(self):
         # Test for creation of s3 bucket
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
-        assert self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert s3_hook.check_for_bucket(self.s3_bucket)
 
         with open(LOCAL_FILE_PATH, "w") as file:
             file.write(test_remote_file_content)
-        self.s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)
+        s3_hook.load_file(LOCAL_FILE_PATH, self.s3_key, bucket_name=BUCKET)
 
         # Check if object was created in s3
         objects_in_dest_bucket = conn.list_objects(Bucket=self.s3_bucket, Prefix=self.s3_key)
@@ -122,7 +118,7 @@ def test_s3_to_sftp_operation(self):
         # Clean up after finishing with test
         conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
         conn.delete_bucket(Bucket=self.s3_bucket)
-        assert not self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert not s3_hook.check_for_bucket(self.s3_bucket)
 
     def delete_remote_resource(self):
         # check the remote file content
@@ -136,5 +132,5 @@ def delete_remote_resource(self):
         assert remove_file_task is not None
         remove_file_task.execute(None)
 
-    def tearDown(self):
+    def teardown_method(self):
         self.delete_remote_resource()
diff --git a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
index b861a2cdc82f2..5dcc1890c244b 100644
--- a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from collections import OrderedDict
 from unittest import mock
 
@@ -55,7 +54,7 @@
 ACL_POLICY = None
 
 
-class TestSalesforceToGcsOperator(unittest.TestCase):
+class TestSalesforceToGcsOperator:
     @mock.patch.object(S3Hook, "load_file")
     @mock.patch.object(SalesforceHook, "write_object_to_file")
     @mock.patch.object(SalesforceHook, "make_query")
diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index 4a23d54d63fe3..d3a301c823030 100644
--- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -17,11 +17,9 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
-
 import boto3
+import pytest
 from moto import mock_s3
-from parameterized import parameterized
 
 from airflow.models import DAG
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -44,12 +42,10 @@
 DEFAULT_DATE = datetime(2018, 1, 1)
 
 
-class TestSFTPToS3Operator(unittest.TestCase):
-    @mock_s3
-    def setUp(self):
+class TestSFTPToS3Operator:
+    def setup_method(self):
         hook = SSHHook(ssh_conn_id="ssh_default")
-        s3_hook = S3Hook("aws_default")
         hook.no_host_key_check = True
         dag = DAG(
             f"{TEST_DAG_ID}test_schedule_dag_once",
@@ -58,7 +54,6 @@ def setUp(self):
         )
 
         self.hook = hook
-        self.s3_hook = s3_hook
 
         self.ssh_client = self.hook.get_conn()
         self.sftp_client = self.ssh_client.open_sftp()
@@ -68,15 +63,10 @@ def setUp(self):
         self.sftp_path = SFTP_PATH
         self.s3_key = S3_KEY
 
-    @parameterized.expand(
-        [
-            (True,),
-            (False,),
-        ]
-    )
+    @pytest.mark.parametrize("use_temp_file", [True, False])
     @mock_s3
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
-    def test_sftp_to_s3_operation(self, use_temp_file=True):
+    def test_sftp_to_s3_operation(self, use_temp_file):
         # Setting
         test_remote_file_content = (
             "This is remote file content \n which is also multiline "
@@ -95,9 +85,10 @@ def test_sftp_to_s3_operation(self, use_temp_file=True):
         create_file_task.execute(None)
 
         # Test for creation of s3 bucket
+        s3_hook = S3Hook(aws_conn_id=None)
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.s3_bucket)
-        assert self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert s3_hook.check_for_bucket(self.s3_bucket)
 
         # get remote file to local
         run_task = SFTPToS3Operator(
@@ -125,4 +116,4 @@ def test_sftp_to_s3_operation(self, use_temp_file=True):
         # Clean up after finishing with test
         conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
         conn.delete_bucket(Bucket=self.s3_bucket)
-        assert not self.s3_hook.check_for_bucket(self.s3_bucket)
+        assert not s3_hook.check_for_bucket(self.s3_bucket)
diff --git a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
index fd7f25a009fe3..84db615eebccf 100644
--- a/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sql_to_s3.py
@@ -17,19 +17,17 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from tempfile import NamedTemporaryFile
 from unittest import mock
 
 import pandas as pd
 import pytest
-from parameterized import parameterized
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.transfers.sql_to_s3 import SqlToS3Operator
 
 
-class TestSqlToS3Operator(unittest.TestCase):
+class TestSqlToS3Operator:
     @mock.patch("airflow.providers.amazon.aws.transfers.sql_to_s3.NamedTemporaryFile")
     @mock.patch("airflow.providers.amazon.aws.transfers.sql_to_s3.S3Hook")
     def test_execute_csv(self, mock_s3_hook, temp_mock):
@@ -146,13 +144,14 @@ def test_execute_json(self, mock_s3_hook, temp_mock):
             replace=True,
         )
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "params",
         [
-            ("with-csv", {"file_format": "csv", "null_string_result": None}),
-            ("with-parquet", {"file_format": "parquet", "null_string_result": "None"}),
-        ]
+            pytest.param({"file_format": "csv", "null_string_result": None}, id="with-csv"),
+            pytest.param({"file_format": "parquet", "null_string_result": "None"}, id="with-parquet"),
+        ],
     )
-    def test_fix_dtypes(self, _, params):
+    def test_fix_dtypes(self, params):
         op = SqlToS3Operator(
             query="query",
             s3_bucket="s3_bucket",