diff --git a/airflow/hooks/dbapi_hook.py b/airflow/hooks/dbapi_hook.py
index c04f64fe80546..531596e10461b 100644
--- a/airflow/hooks/dbapi_hook.py
+++ b/airflow/hooks/dbapi_hook.py
@@ -243,7 +243,7 @@ def _generate_insert_sql(table, values, target_fields, replace, **kwargs):
         :return: The generated INSERT or REPLACE SQL statement
         :rtype: str
         """
-        placeholders = ["%s", ] * len(values)
+        placeholders = ["%s"] * len(values)
 
         if target_fields:
             target_fields = ", ".join(target_fields)
diff --git a/airflow/lineage/__init__.py b/airflow/lineage/__init__.py
index f3c043bb0fc87..3c8e8d95c06b4 100644
--- a/airflow/lineage/__init__.py
+++ b/airflow/lineage/__init__.py
@@ -129,7 +129,7 @@ def wrapper(self, context, *args, **kwargs):
         self.log.debug("Preparing lineage inlets and outlets")
 
         if isinstance(self._inlets, (str, Operator)) or attr.has(self._inlets):
-            self._inlets = [self._inlets, ]
+            self._inlets = [self._inlets]
 
         if self._inlets and isinstance(self._inlets, list):
             # get task_ids that are specified as parameter and make sure they are upstream
@@ -158,7 +158,7 @@ def wrapper(self, context, *args, **kwargs):
             raise AttributeError("inlets is not a list, operator, string or attr annotated object")
 
         if not isinstance(self._outlets, list):
-            self._outlets = [self._outlets, ]
+            self._outlets = [self._outlets]
 
         self.outlets.extend(self._outlets)
 
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index 85f01dccb8949..7145c075287b0 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -442,10 +442,10 @@ def __init__(
         self._outlets: List = []
 
         if inlets:
-            self._inlets = inlets if isinstance(inlets, list) else [inlets, ]
+            self._inlets = inlets if isinstance(inlets, list) else [inlets]
 
         if outlets:
-            self._outlets = outlets if isinstance(outlets, list) else [outlets, ]
+            self._outlets = outlets if isinstance(outlets, list) else [outlets]
 
     def __eq__(self, other):
         if type(self) is type(other) and self.task_id == other.task_id:
diff --git a/airflow/providers/amazon/aws/operators/redshift_to_s3.py b/airflow/providers/amazon/aws/operators/redshift_to_s3.py
index aa44375aba98f..a70a5ab8f2da2 100644
--- a/airflow/providers/amazon/aws/operators/redshift_to_s3.py
+++ b/airflow/providers/amazon/aws/operators/redshift_to_s3.py
@@ -98,7 +98,7 @@ def __init__(  # pylint: disable=too-many-arguments
         self.table_as_file_name = table_as_file_name
 
         if self.include_header and 'HEADER' not in [uo.upper().strip() for uo in self.unload_options]:
-            self.unload_options = list(self.unload_options) + ['HEADER', ]
+            self.unload_options = list(self.unload_options) + ['HEADER']
 
     def execute(self, context):
         postgres_hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
diff --git a/airflow/providers/http/operators/http.py b/airflow/providers/http/operators/http.py
index b18ffc05e6e69..38ddd02e11308 100644
--- a/airflow/providers/http/operators/http.py
+++ b/airflow/providers/http/operators/http.py
@@ -50,7 +50,7 @@ class SimpleHttpOperator(BaseOperator):
     :type log_response: bool
     """
 
-    template_fields = ['endpoint', 'data', 'headers', ]
+    template_fields = ['endpoint', 'data', 'headers']
     template_ext = ()
     ui_color = '#f4a460'
 
diff --git a/airflow/providers/microsoft/azure/operators/azure_container_instances.py b/airflow/providers/microsoft/azure/operators/azure_container_instances.py
index 056c4ba6f2c88..db433b984b801 100644
--- a/airflow/providers/microsoft/azure/operators/azure_container_instances.py
+++ b/airflow/providers/microsoft/azure/operators/azure_container_instances.py
@@ -108,7 +108,7 @@ class AzureContainerInstancesOperator(BaseOperator):
                 "my_storage_container",
                 "my_fileshare",
                 "/input-data",
-                True),],
+                True)],
                 memory_in_gb=14.0,
                 cpu=4.0,
                 gpu=GpuResource(count=1, sku='K80'),
@@ -174,7 +174,7 @@ def execute(self, context):
 
         if self.registry_conn_id:
             registry_hook = AzureContainerRegistryHook(self.registry_conn_id)
-            image_registry_credentials = [registry_hook.connection, ]
+            image_registry_credentials = [registry_hook.connection]
         else:
             image_registry_credentials = None
 
@@ -223,7 +223,7 @@ def execute(self, context):
 
             container_group = ContainerGroup(
                 location=self.region,
-                containers=[container, ],
+                containers=[container],
                 image_registry_credentials=image_registry_credentials,
                 volumes=volumes,
                 restart_policy='Never',
diff --git a/airflow/providers/postgres/hooks/postgres.py b/airflow/providers/postgres/hooks/postgres.py
index bf5fa4d094614..d2853955216dc 100644
--- a/airflow/providers/postgres/hooks/postgres.py
+++ b/airflow/providers/postgres/hooks/postgres.py
@@ -201,7 +201,7 @@ def _generate_insert_sql(table, values, target_fields, replace, **kwargs):
         :return: The generated INSERT or REPLACE SQL statement
         :rtype: str
         """
-        placeholders = ["%s", ] * len(values)
+        placeholders = ["%s"] * len(values)
         replace_index = kwargs.get("replace_index", None)
 
         if target_fields:
diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py
index 5e457a55f3879..d69ae62a4b721 100644
--- a/airflow/providers/slack/operators/slack_webhook.py
+++ b/airflow/providers/slack/operators/slack_webhook.py
@@ -59,7 +59,7 @@ class SlackWebhookOperator(SimpleHttpOperator):
     """
 
     template_fields = ['webhook_token', 'message', 'attachments', 'blocks', 'channel',
-                       'username', 'proxy', ]
+                       'username', 'proxy']
 
     # pylint: disable=too-many-arguments
     @apply_defaults
diff --git a/docs/conf.py b/docs/conf.py
index 95d2f5ec26d94..32d859468651b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -462,7 +462,7 @@
     'Apache Airflow',
     'Airflow',
     'Airflow is a system to programmatically author, schedule and monitor data pipelines.',
     'Miscellaneous'
-), ]
+)]
 
 # Documents to append as an appendix to all manuals.
 # texinfo_appendices = []
diff --git a/requirements/setup-3.6.md5 b/requirements/setup-3.6.md5
index 0f7178f4bc1a6..8a8c7548498ca 100644
--- a/requirements/setup-3.6.md5
+++ b/requirements/setup-3.6.md5
@@ -1 +1 @@
-0e0464b2825b47f66257b054e2563e54 /opt/airflow/setup.py
+80605a94fcebdae45a455abdcf77cd2f /opt/airflow/setup.py
diff --git a/requirements/setup-3.7.md5 b/requirements/setup-3.7.md5
index 0f7178f4bc1a6..8a8c7548498ca 100644
--- a/requirements/setup-3.7.md5
+++ b/requirements/setup-3.7.md5
@@ -1 +1 @@
-0e0464b2825b47f66257b054e2563e54 /opt/airflow/setup.py
+80605a94fcebdae45a455abdcf77cd2f /opt/airflow/setup.py
diff --git a/setup.py b/setup.py
index e805224762fc4..7b18bfd7830e8 100644
--- a/setup.py
+++ b/setup.py
@@ -354,7 +354,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'psycopg2-binary>=2.7.4',
 ]
 presto = [
-    'presto-python-client>=0.7.0,<0.8'
+    'presto-python-client>=0.7.0,<0.8',
 ]
 qds = [
     'qds-sdk>=1.10.4',
@@ -381,7 +381,9 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'blinker>=1.1',
     'sentry-sdk>=0.8.0',
 ]
-singularity = ['spython>=0.0.56']
+singularity = [
+    'spython>=0.0.56',
+]
 slack = [
     'slackclient>=2.0.0,<3.0.0',
 ]
diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py
index dd117e268147b..e7f8d289ae9e4 100644
--- a/tests/cli/commands/test_dag_command.py
+++ b/tests/cli/commands/test_dag_command.py
@@ -331,7 +331,7 @@ def test_cli_list_dags(self):
 
     def test_cli_list_dag_runs(self):
         dag_command.dag_trigger(self.parser.parse_args([
-            'dags', 'trigger', 'example_bash_operator', ]))
+            'dags', 'trigger', 'example_bash_operator']))
 
         args = self.parser.parse_args(['dags',
                                        'list_runs',
                                        '--dag-id',
diff --git a/tests/lineage/test_lineage.py b/tests/lineage/test_lineage.py
index dddaaaa70722d..7164b99e4b2ab 100644
--- a/tests/lineage/test_lineage.py
+++ b/tests/lineage/test_lineage.py
@@ -44,7 +44,7 @@ def test_lineage(self):
         with dag:
             op1 = DummyOperator(task_id='leave1',
                                 inlets=file1,
-                                outlets=[file2, ])
+                                outlets=[file2])
             op2 = DummyOperator(task_id='leave2')
             op3 = DummyOperator(task_id='upstream_level_1',
                                 inlets=AUTO,
diff --git a/tests/operators/test_generic_transfer.py b/tests/operators/test_generic_transfer.py
index 79355045e75dc..0252fd559d505 100644
--- a/tests/operators/test_generic_transfer.py
+++ b/tests/operators/test_generic_transfer.py
@@ -50,7 +50,7 @@ def tearDown(self):
             for table in drop_tables:
                 conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_to_mysql(self, client):
         with MySqlContext(client):
             sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
diff --git a/tests/providers/amazon/aws/operators/test_redshift_to_s3.py b/tests/providers/amazon/aws/operators/test_redshift_to_s3.py
index 25fe8d6de5625..099a183eb8b5b 100644
--- a/tests/providers/amazon/aws/operators/test_redshift_to_s3.py
+++ b/tests/providers/amazon/aws/operators/test_redshift_to_s3.py
@@ -43,7 +43,7 @@ def test_execute(self, table_as_file_name, expected_s3_key, mock_run, mock_sessi
         table = "table"
         s3_bucket = "bucket"
         s3_key = "key"
-        unload_options = ['HEADER', ]
+        unload_options = ['HEADER']
 
         RedshiftToS3Transfer(
             schema=schema,
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_gcs.py b/tests/providers/google/cloud/operators/test_gcs_to_gcs.py
index 03bac1a7230b2..ddebc7f412c44 100644
--- a/tests/providers/google/cloud/operators/test_gcs_to_gcs.py
+++ b/tests/providers/google/cloud/operators/test_gcs_to_gcs.py
@@ -471,7 +471,8 @@ def test_executes_with_no_destination_bucket_and_no_destination_object(self, moc
             mock.call(TEST_BUCKET, 'test_object/file2.txt',
                       TEST_BUCKET, 'test_object/file2.txt'),
             mock.call(TEST_BUCKET, 'test_object/file3.json',
-                      TEST_BUCKET, 'test_object/file3.json'), ]
+                      TEST_BUCKET, 'test_object/file3.json'),
+        ]
 
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls)
 
@@ -497,7 +498,8 @@ def test_wc_with_last_modified_time_with_all_true_cond_no_file(self, mock_hook):
             mock.call(
                 TEST_BUCKET, 'test_object/file3.json', DESTINATION_BUCKET, 'test_object/file3.json'
-            ), ]
+            ),
+        ]
 
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
diff --git a/tests/providers/mysql/hooks/test_mysql.py b/tests/providers/mysql/hooks/test_mysql.py
index 5d26bd166c7a9..a52cbe884c8e9 100644
--- a/tests/providers/mysql/hooks/test_mysql.py
+++ b/tests/providers/mysql/hooks/test_mysql.py
@@ -353,7 +353,7 @@ def tearDown(self):
            for table in drop_tables:
                conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_hook_test_bulk_load(self, client):
         with MySqlContext(client):
             records = ("foo", "bar", "baz")
@@ -376,7 +376,7 @@ def test_mysql_hook_test_bulk_load(self, client):
                results = tuple(result[0] for result in conn.fetchall())
                self.assertEqual(sorted(results), sorted(records))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_hook_test_bulk_dump(self, client):
         with MySqlContext(client):
             hook = MySqlHook('airflow_db')
@@ -388,7 +388,7 @@ def test_mysql_hook_test_bulk_dump(self, client):
                self.skipTest("Skip test_mysql_hook_test_bulk_load "
                              "since file output is not permitted")
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     @mock.patch('airflow.providers.mysql.hooks.mysql.MySqlHook.get_conn')
     def test_mysql_hook_test_bulk_dump_mock(self, client, mock_get_conn):
         with MySqlContext(client):
diff --git a/tests/providers/mysql/operators/test_mysql.py b/tests/providers/mysql/operators/test_mysql.py
index 7ea33592c851b..fcbaf34ddcdb8 100644
--- a/tests/providers/mysql/operators/test_mysql.py
+++ b/tests/providers/mysql/operators/test_mysql.py
@@ -48,7 +48,7 @@ def tearDown(self):
            for table in drop_tables:
                conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_operator_test(self, client):
         with MySqlContext(client):
             sql = """
@@ -62,7 +62,7 @@ def test_mysql_operator_test(self, client):
                 dag=self.dag)
             op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_operator_test_multi(self, client):
         with MySqlContext(client):
             sql = [
@@ -77,7 +77,7 @@ def test_mysql_operator_test_multi(self, client):
             )
             op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_overwrite_schema(self, client):
         """
         Verifies option to overwrite connection schema
diff --git a/tests/utils/test_logging_mixin.py b/tests/utils/test_logging_mixin.py
index b9550f4bd1fac..6b08b86a9f0d9 100644
--- a/tests/utils/test_logging_mixin.py
+++ b/tests/utils/test_logging_mixin.py
@@ -34,9 +34,9 @@ def test_set_context(self):
         handler2 = mock.MagicMock()
         parent = mock.MagicMock()
         parent.propagate = False
-        parent.handlers = [handler1, ]
+        parent.handlers = [handler1]
         log = mock.MagicMock()
-        log.handlers = [handler2, ]
+        log.handlers = [handler2]
         log.parent = parent
         log.propagate = True