2 changes: 1 addition & 1 deletion airflow/hooks/dbapi_hook.py
@@ -243,7 +243,7 @@ def _generate_insert_sql(table, values, target_fields, replace, **kwargs):
         :return: The generated INSERT or REPLACE SQL statement
         :rtype: str
         """
-        placeholders = ["%s", ] * len(values)
+        placeholders = ["%s"] * len(values)
 
         if target_fields:
             target_fields = ", ".join(target_fields)
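For context, the two spellings are interchangeable at runtime: a Python list literal means the same thing with or without a trailing comma, so this change is purely stylistic. A minimal, self-contained sketch (hypothetical table and column names, not the hook's actual code) of how the placeholder list feeds the generated statement:

    # The trailing comma changes nothing: both literals build the same list.
    values = ("Alice", 42)
    assert ["%s", ] * len(values) == ["%s"] * len(values)

    # One %s placeholder per value, joined into the INSERT statement.
    placeholders = ["%s"] * len(values)
    sql = "INSERT INTO users (name, age) VALUES ({})".format(", ".join(placeholders))
    assert sql == "INSERT INTO users (name, age) VALUES (%s, %s)"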
4 changes: 2 additions & 2 deletions airflow/lineage/__init__.py
@@ -129,7 +129,7 @@ def wrapper(self, context, *args, **kwargs):
         self.log.debug("Preparing lineage inlets and outlets")
 
         if isinstance(self._inlets, (str, Operator)) or attr.has(self._inlets):
-            self._inlets = [self._inlets, ]
+            self._inlets = [self._inlets]
 
         if self._inlets and isinstance(self._inlets, list):
             # get task_ids that are specified as parameter and make sure they are upstream
@@ -158,7 +158,7 @@ def wrapper(self, context, *args, **kwargs):
             raise AttributeError("inlets is not a list, operator, string or attr annotated object")
 
         if not isinstance(self._outlets, list):
-            self._outlets = [self._outlets, ]
+            self._outlets = [self._outlets]
 
         self.outlets.extend(self._outlets)
 
4 changes: 2 additions & 2 deletions airflow/models/baseoperator.py
@@ -442,10 +442,10 @@ def __init__(
         self._outlets: List = []
 
         if inlets:
-            self._inlets = inlets if isinstance(inlets, list) else [inlets, ]
+            self._inlets = inlets if isinstance(inlets, list) else [inlets]
 
         if outlets:
-            self._outlets = outlets if isinstance(outlets, list) else [outlets, ]
+            self._outlets = outlets if isinstance(outlets, list) else [outlets]
 
     def __eq__(self, other):
         if type(self) is type(other) and self.task_id == other.task_id:
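The lineage and baseoperator changes all touch the same scalar-to-list normalization idiom, which this PR leaves behaviorally identical. A standalone sketch of the idiom (hypothetical function name, not code from the PR):

    def normalize(obj):
        # Wrap a single inlet/outlet in a list; pass lists through unchanged.
        return obj if isinstance(obj, list) else [obj]

    assert normalize("file1") == ["file1"]
    assert normalize(["file1", "file2"]) == ["file1", "file2"]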
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/operators/redshift_to_s3.py
@@ -98,7 +98,7 @@ def __init__( # pylint: disable=too-many-arguments
         self.table_as_file_name = table_as_file_name
 
         if self.include_header and 'HEADER' not in [uo.upper().strip() for uo in self.unload_options]:
-            self.unload_options = list(self.unload_options) + ['HEADER', ]
+            self.unload_options = list(self.unload_options) + ['HEADER']
 
     def execute(self, context):
         postgres_hook = PostgresHook(postgres_conn_id=self.redshift_conn_id)
2 changes: 1 addition & 1 deletion airflow/providers/http/operators/http.py
@@ -50,7 +50,7 @@ class SimpleHttpOperator(BaseOperator):
     :type log_response: bool
     """
 
-    template_fields = ['endpoint', 'data', 'headers', ]
+    template_fields = ['endpoint', 'data', 'headers']
     template_ext = ()
    ui_color = '#f4a460'
 
6 changes: 3 additions & 3 deletions airflow/providers/microsoft/azure/operators/azure_container_instances.py
@@ -108,7 +108,7 @@ class AzureContainerInstancesOperator(BaseOperator):
                 "my_storage_container",
                 "my_fileshare",
                 "/input-data",
-                True),],
+                True)],
             memory_in_gb=14.0,
             cpu=4.0,
             gpu=GpuResource(count=1, sku='K80'),
@@ -174,7 +174,7 @@ def execute(self, context):
 
         if self.registry_conn_id:
             registry_hook = AzureContainerRegistryHook(self.registry_conn_id)
-            image_registry_credentials = [registry_hook.connection, ]
+            image_registry_credentials = [registry_hook.connection]
         else:
             image_registry_credentials = None
 
@@ -223,7 +223,7 @@ def execute(self, context):
 
         container_group = ContainerGroup(
             location=self.region,
-            containers=[container, ],
+            containers=[container],
             image_registry_credentials=image_registry_credentials,
             volumes=volumes,
             restart_policy='Never',
2 changes: 1 addition & 1 deletion airflow/providers/postgres/hooks/postgres.py
@@ -201,7 +201,7 @@ def _generate_insert_sql(table, values, target_fields, replace, **kwargs):
         :return: The generated INSERT or REPLACE SQL statement
         :rtype: str
         """
-        placeholders = ["%s", ] * len(values)
+        placeholders = ["%s"] * len(values)
         replace_index = kwargs.get("replace_index", None)
 
         if target_fields:
2 changes: 1 addition & 1 deletion airflow/providers/slack/operators/slack_webhook.py
@@ -59,7 +59,7 @@ class SlackWebhookOperator(SimpleHttpOperator):
     """
 
     template_fields = ['webhook_token', 'message', 'attachments', 'blocks', 'channel',
-                       'username', 'proxy', ]
+                       'username', 'proxy']
 
     # pylint: disable=too-many-arguments
     @apply_defaults
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -462,7 +462,7 @@
     'Apache Airflow', 'Airflow',
     'Airflow is a system to programmatically author, schedule and monitor data pipelines.',
     'Miscellaneous'
-), ]
+)]
 
 # Documents to append as an appendix to all manuals.
 # texinfo_appendices = []
2 changes: 1 addition & 1 deletion requirements/setup-3.6.md5
@@ -1 +1 @@
-0e0464b2825b47f66257b054e2563e54 /opt/airflow/setup.py
+80605a94fcebdae45a455abdcf77cd2f /opt/airflow/setup.py
2 changes: 1 addition & 1 deletion requirements/setup-3.7.md5
@@ -1 +1 @@
-0e0464b2825b47f66257b054e2563e54 /opt/airflow/setup.py
+80605a94fcebdae45a455abdcf77cd2f /opt/airflow/setup.py
6 changes: 4 additions & 2 deletions setup.py
@@ -354,7 +354,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'psycopg2-binary>=2.7.4',
 ]
 presto = [
-    'presto-python-client>=0.7.0,<0.8'
+    'presto-python-client>=0.7.0,<0.8',
 ]
 qds = [
     'qds-sdk>=1.10.4',
@@ -381,7 +381,9 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     'blinker>=1.1',
     'sentry-sdk>=0.8.0',
 ]
-singularity = ['spython>=0.0.56']
+singularity = [
+    'spython>=0.0.56',
+]
 slack = [
     'slackclient>=2.0.0,<3.0.0',
 ]
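Note that setup.py moves in the opposite direction from the rest of the diff: one-line list literals drop the trailing comma, while lists split across lines gain one (singularity is rewritten into the multi-line form for the same reason). An illustration of that convention, with made-up package names:

    # Single-line literal: no trailing comma before the closing bracket.
    example = ['pkg-a>=1.0']

    # Multi-line literal: one item per line, each ending in a comma, so
    # appending a dependency later changes exactly one line in a diff.
    example_extras = [
        'pkg-b>=2.0',
        'pkg-c>=3.0',
    ]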
2 changes: 1 addition & 1 deletion tests/cli/commands/test_dag_command.py
@@ -331,7 +331,7 @@ def test_cli_list_dags(self):
 
     def test_cli_list_dag_runs(self):
         dag_command.dag_trigger(self.parser.parse_args([
-            'dags', 'trigger', 'example_bash_operator', ]))
+            'dags', 'trigger', 'example_bash_operator']))
         args = self.parser.parse_args(['dags',
                                        'list_runs',
                                        '--dag-id',
2 changes: 1 addition & 1 deletion tests/lineage/test_lineage.py
@@ -44,7 +44,7 @@ def test_lineage(self):
         with dag:
             op1 = DummyOperator(task_id='leave1',
                                 inlets=file1,
-                                outlets=[file2, ])
+                                outlets=[file2])
             op2 = DummyOperator(task_id='leave2')
             op3 = DummyOperator(task_id='upstream_level_1',
                                 inlets=AUTO,
2 changes: 1 addition & 1 deletion tests/operators/test_generic_transfer.py
@@ -50,7 +50,7 @@ def tearDown(self):
         for table in drop_tables:
             conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_to_mysql(self, client):
         with MySqlContext(client):
             sql = "SELECT * FROM INFORMATION_SCHEMA.TABLES LIMIT 100;"
2 changes: 1 addition & 1 deletion tests/providers/amazon/aws/operators/test_redshift_to_s3.py
@@ -43,7 +43,7 @@ def test_execute(self, table_as_file_name, expected_s3_key, mock_run, mock_sessi
         table = "table"
         s3_bucket = "bucket"
         s3_key = "key"
-        unload_options = ['HEADER', ]
+        unload_options = ['HEADER']
 
         RedshiftToS3Transfer(
             schema=schema,
6 changes: 4 additions & 2 deletions tests/providers/google/cloud/operators/test_gcs_to_gcs.py
@@ -471,7 +471,8 @@ def test_executes_with_no_destination_bucket_and_no_destination_object(self, moc
             mock.call(TEST_BUCKET, 'test_object/file2.txt',
                       TEST_BUCKET, 'test_object/file2.txt'),
             mock.call(TEST_BUCKET, 'test_object/file3.json',
-                      TEST_BUCKET, 'test_object/file3.json'), ]
+                      TEST_BUCKET, 'test_object/file3.json'),
+        ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls)
 
     @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
@@ -497,7 +498,8 @@ def test_wc_with_last_modified_time_with_all_true_cond_no_file(self, mock_hook):
             mock.call(
                 TEST_BUCKET, 'test_object/file3.json',
                 DESTINATION_BUCKET, 'test_object/file3.json'
-            ), ]
+            ),
+        ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
 
6 changes: 3 additions & 3 deletions tests/providers/mysql/hooks/test_mysql.py
@@ -353,7 +353,7 @@ def tearDown(self):
         for table in drop_tables:
             conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_hook_test_bulk_load(self, client):
         with MySqlContext(client):
             records = ("foo", "bar", "baz")
@@ -376,7 +376,7 @@ def test_mysql_hook_test_bulk_load(self, client):
             results = tuple(result[0] for result in conn.fetchall())
             self.assertEqual(sorted(results), sorted(records))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_hook_test_bulk_dump(self, client):
         with MySqlContext(client):
             hook = MySqlHook('airflow_db')
@@ -388,7 +388,7 @@ def test_mysql_hook_test_bulk_dump(self, client):
             self.skipTest("Skip test_mysql_hook_test_bulk_load "
                           "since file output is not permitted")
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     @mock.patch('airflow.providers.mysql.hooks.mysql.MySqlHook.get_conn')
     def test_mysql_hook_test_bulk_dump_mock(self, client, mock_get_conn):
         with MySqlContext(client):
6 changes: 3 additions & 3 deletions tests/providers/mysql/operators/test_mysql.py
@@ -48,7 +48,7 @@ def tearDown(self):
         for table in drop_tables:
             conn.execute("DROP TABLE IF EXISTS {}".format(table))
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_operator_test(self, client):
         with MySqlContext(client):
             sql = """
@@ -62,7 +62,7 @@ def test_mysql_operator_test(self, client):
             dag=self.dag)
         op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_mysql_operator_test_multi(self, client):
         with MySqlContext(client):
             sql = [
@@ -77,7 +77,7 @@ def test_mysql_operator_test_multi(self, client):
         )
         op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
-    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ])
+    @parameterized.expand([("mysqlclient",), ("mysql-connector-python",)])
     def test_overwrite_schema(self, client):
         """
         Verifies option to overwrite connection schema
4 changes: 2 additions & 2 deletions tests/utils/test_logging_mixin.py
@@ -34,9 +34,9 @@ def test_set_context(self):
         handler2 = mock.MagicMock()
         parent = mock.MagicMock()
         parent.propagate = False
-        parent.handlers = [handler1, ]
+        parent.handlers = [handler1]
         log = mock.MagicMock()
-        log.handlers = [handler2, ]
+        log.handlers = [handler2]
         log.parent = parent
         log.propagate = True
 