diff --git a/airflow/providers/amazon/aws/operators/emr.py b/airflow/providers/amazon/aws/operators/emr.py
index 2a042cc784f1f..95d5ef748816b 100644
--- a/airflow/providers/amazon/aws/operators/emr.py
+++ b/airflow/providers/amazon/aws/operators/emr.py
@@ -716,7 +716,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             # waiter_countdown defaults to never timing out, which is not supported
             # by boto waiters, so we will set it here to "a very long time" for now.
@@ -725,7 +727,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to "
                 "standardize naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             waiter_delay = waiter_check_interval_seconds
         super().__init__(**kwargs)
@@ -1024,7 +1028,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1036,7 +1042,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.aws_conn_id = aws_conn_id
         self.release_label = release_label
@@ -1205,7 +1213,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1217,7 +1227,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.aws_conn_id = aws_conn_id
         self.application_id = application_id
@@ -1408,7 +1420,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1420,7 +1434,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.aws_conn_id = aws_conn_id
         self.application_id = application_id
@@ -1569,7 +1585,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_check_interval_seconds has been deprecated to standardize "
                 "naming conventions. Please use waiter_delay instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         if waiter_countdown is NOTSET:
             waiter_max_attempts = 25 if waiter_max_attempts is NOTSET else waiter_max_attempts
@@ -1581,7 +1599,9 @@ def __init__(
             warnings.warn(
                 "The parameter waiter_countdown has been deprecated to standardize "
                 "naming conventions. Please use waiter_max_attempts instead. In the "
-                "future this will default to None and defer to the waiter's default value."
+                "future this will default to None and defer to the waiter's default value.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
         self.wait_for_delete_completion = wait_for_completion
         # super stops the app
diff --git a/airflow/providers/celery/executors/celery_executor_utils.py b/airflow/providers/celery/executors/celery_executor_utils.py
index 6e056409d09d3..292bbc0c70b2e 100644
--- a/airflow/providers/celery/executors/celery_executor_utils.py
+++ b/airflow/providers/celery/executors/celery_executor_utils.py
@@ -41,7 +41,7 @@
 import airflow.settings as settings
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.executors.base_executor import BaseExecutor
 from airflow.stats import Stats
 from airflow.utils.dag_parsing_context import _airflow_parsing_context_manager
@@ -88,7 +88,8 @@ def _get_celery_app() -> Celery:
             "Change it to `airflow.providers.celery.executors.celery_executor`, and "
             "update the `-app` flag in your Celery Health Checks "
             "to use `airflow.providers.celery.executors.celery_executor.app`.",
-            RemovedInAirflow3Warning,
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
 
     return Celery(celery_app_name, config_source=celery_configuration)
diff --git a/airflow/providers/cncf/kubernetes/pod_generator.py b/airflow/providers/cncf/kubernetes/pod_generator.py
index 1e991cb6d959a..9f01231b0c0f3 100644
--- a/airflow/providers/cncf/kubernetes/pod_generator.py
+++ b/airflow/providers/cncf/kubernetes/pod_generator.py
@@ -39,7 +39,7 @@
 from airflow.exceptions import (
     AirflowConfigException,
     AirflowException,
-    RemovedInAirflow3Warning,
+    AirflowProviderDeprecationWarning,
 )
 from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     POD_NAME_MAX_LENGTH,
@@ -155,7 +155,7 @@ def __init__(
     def gen_pod(self) -> k8s.V1Pod:
         """Generate pod."""
-        warnings.warn("This function is deprecated. ", RemovedInAirflow3Warning)
+        warnings.warn("This function is deprecated. ", AirflowProviderDeprecationWarning, stacklevel=2)
         result = self.ud_pod
 
         result.metadata.name = add_pod_suffix(pod_name=result.metadata.name)
@@ -170,7 +170,9 @@ def add_xcom_sidecar(pod: k8s.V1Pod) -> k8s.V1Pod:
         """Add sidecar."""
         warnings.warn(
             "This function is deprecated. "
-            "Please use airflow.providers.cncf.kubernetes.utils.xcom_sidecar.add_xcom_sidecar instead"
+            "Please use airflow.providers.cncf.kubernetes.utils.xcom_sidecar.add_xcom_sidecar instead",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         pod_cp = copy.deepcopy(pod)
         pod_cp.spec.volumes = pod.spec.volumes or []
@@ -207,7 +209,8 @@ def from_obj(obj) -> dict | k8s.V1Pod | None:
                 "Using a dictionary for the executor_config is deprecated and will soon be removed."
                 'please use a `kubernetes.client.models.V1Pod` class with a "pod_override" key'
                 " instead. ",
-                category=RemovedInAirflow3Warning,
+                category=AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             return PodGenerator.from_legacy_obj(obj)
         else:
@@ -386,7 +389,10 @@ def construct_pod(
         """
         if len(pod_id) > POD_NAME_MAX_LENGTH:
             warnings.warn(
-                f"pod_id supplied is longer than {POD_NAME_MAX_LENGTH} characters; truncating and adding unique suffix."
+                f"pod_id supplied is longer than {POD_NAME_MAX_LENGTH} characters; "
+                f"truncating and adding unique suffix.",
+                UserWarning,
+                stacklevel=2,
             )
             pod_id = add_pod_suffix(pod_name=pod_id, max_len=POD_NAME_MAX_LENGTH)
         try:
@@ -583,7 +589,8 @@ def make_unique_pod_id(pod_id: str) -> str | None:
         """
         warnings.warn(
             "This function is deprecated. Use `add_pod_suffix` in `kubernetes_helper_functions`.",
-            RemovedInAirflow3Warning,
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
 
         if not pod_id:
diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/airflow/providers/dbt/cloud/operators/dbt.py
index 74acede95b490..0b56e88e017bf 100644
--- a/airflow/providers/dbt/cloud/operators/dbt.py
+++ b/airflow/providers/dbt/cloud/operators/dbt.py
@@ -184,6 +184,8 @@ def execute(self, context: Context):
                 warnings.warn(
                     "Argument `wait_for_termination` is False and `deferrable` is True , hence "
                     "`deferrable` parameter doesn't have any effect",
+                    UserWarning,
+                    stacklevel=2,
                 )
             return self.run_id
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py
index 1320756a2c17e..8bd3dca05734f 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -171,6 +171,7 @@ def __init__(
             warnings.warn(
                 "Passing log_id_template to ElasticsearchTaskHandler is deprecated and has no effect",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             self.log_id_template = log_id_template  # Only used on Airflow < 2.3.2.
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index 3ad29c66f05cf..c699f74756316 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -145,6 +145,7 @@ def get_service(self) -> Resource:
         warnings.warn(
             "This method will be deprecated. Please use `BigQueryHook.get_client` method",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         http_authorized = self._authorize()
         return build("bigquery", "v2", http=http_authorized, cache_discovery=False)
@@ -624,6 +625,7 @@ def create_external_table(
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.create_empty_table` method with "
             "passing the `table_resource` object. This gives more flexibility than this method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         location = location or self.location
         src_fmt_configs = src_fmt_configs or {}
@@ -808,6 +810,7 @@ def patch_table(
         warnings.warn(
             "This method is deprecated, please use ``BigQueryHook.update_table`` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         table_resource: dict[str, Any] = {}
@@ -958,7 +961,9 @@ def patch_dataset(self, dataset_id: str, dataset_resource: dict, project_id: str
         :param project_id: The Google Cloud Project ID
         """
         warnings.warn(
-            "This method is deprecated. Please use ``update_dataset``.", AirflowProviderDeprecationWarning
+            "This method is deprecated. Please use ``update_dataset``.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         project_id = project_id or self.project_id
         if not dataset_id or not isinstance(dataset_id, str):
@@ -1007,7 +1012,9 @@ def get_dataset_tables_list(
         :return: List of tables associated with the dataset
         """
         warnings.warn(
-            "This method is deprecated. Please use ``get_dataset_tables``.", AirflowProviderDeprecationWarning
+            "This method is deprecated. Please use ``get_dataset_tables``.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         project_id = project_id or self.project_id
         tables = self.get_client().list_tables(
@@ -1197,7 +1204,9 @@ def run_table_delete(self, deletion_dataset_table: str, ignore_if_missing: bool
         :return:
         """
         warnings.warn(
-            "This method is deprecated. Please use `delete_table`.", AirflowProviderDeprecationWarning
+            "This method is deprecated. Please use `delete_table`.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         return self.delete_table(table_id=deletion_dataset_table, not_found_ok=ignore_if_missing)
@@ -1250,7 +1259,11 @@ def get_tabledata(
         :param start_index: zero based index of the starting row to read.
         :return: list of rows
         """
-        warnings.warn("This method is deprecated. Please use `list_rows`.", AirflowProviderDeprecationWarning)
+        warnings.warn(
+            "This method is deprecated. Please use `list_rows`.",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
         rows = self.list_rows(
             dataset_id=dataset_id,
             table_id=table_id,
@@ -1458,6 +1471,7 @@ def cancel_query(self) -> None:
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.cancel_job`.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         if self.running_job_id:
             self.cancel_job(job_id=self.running_job_id)
@@ -1617,6 +1631,7 @@ def run_with_configuration(self, configuration: dict) -> str:
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.insert_job`",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         job = self.insert_job(configuration=configuration, project_id=self.project_id)
         self.running_job_id = job.job_id
@@ -1714,6 +1729,7 @@ def run_load(
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.insert_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
 
         if not self.project_id:
@@ -1901,6 +1917,7 @@ def run_copy(
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.insert_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         if not self.project_id:
             raise ValueError("The project_id should be set")
@@ -1982,6 +1999,7 @@ def run_extract(
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.insert_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         if not self.project_id:
             raise ValueError("The project_id should be set")
@@ -2109,6 +2127,7 @@ def run_query(
         warnings.warn(
             "This method is deprecated. Please use `BigQueryHook.insert_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         if not self.project_id:
             raise ValueError("The project_id should be set")
diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
index b8c1215b82145..495f58cea5d1f 100644
--- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
@@ -250,7 +250,11 @@ def list_transfer_job(self, request_filter: dict | None = None, **kwargs) -> lis
             request_filter = kwargs["filter"]
             if not isinstance(request_filter, dict):
                 raise ValueError(f"The request_filter should be dict and is {type(request_filter)}")
-            warnings.warn("Use 'request_filter' instead of 'filter'", AirflowProviderDeprecationWarning)
+            warnings.warn(
+                "Use 'request_filter' instead of 'filter'",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
         else:
             raise TypeError("list_transfer_job missing 1 required positional argument: 'request_filter'")
@@ -374,7 +378,11 @@ def list_transfer_operations(self, request_filter: dict | None = None, **kwargs)
             request_filter = kwargs["filter"]
             if not isinstance(request_filter, dict):
                 raise ValueError(f"The request_filter should be dict and is {type(request_filter)}")
-            warnings.warn("Use 'request_filter' instead of 'filter'", AirflowProviderDeprecationWarning)
+            warnings.warn(
+                "Use 'request_filter' instead of 'filter'",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
         else:
             raise TypeError(
                 "list_transfer_operations missing 1 required positional argument: 'request_filter'"
diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
index df0bd86025224..0e62b990da921 100644
--- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py
@@ -102,6 +102,7 @@ def get_conn(self) -> container_v1.ClusterManagerClient:
         warnings.warn(
             "The get_conn method has been deprecated. You should use the get_cluster_manager_client method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         return self.get_cluster_manager_client()
@@ -111,6 +112,7 @@ def get_client(self) -> ClusterManagerClient:
         warnings.warn(
             "The get_client method has been deprecated. You should use the get_conn method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         return self.get_conn()
diff --git a/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py b/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
index dc6bd313f37d9..d94cd83b1df91 100644
--- a/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
+++ b/airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py
@@ -391,6 +391,7 @@ def cancel_pipeline_job(
         warnings.warn(
             "This method is deprecated, please use `PipelineJobHook.cancel_pipeline_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         client = self.get_pipeline_service_client(region)
         name = client.pipeline_job_path(project_id, region, pipeline_job)
@@ -516,6 +517,7 @@ def create_pipeline_job(
         warnings.warn(
             "This method is deprecated, please use `PipelineJobHook.create_pipeline_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         client = self.get_pipeline_service_client(region)
         parent = client.common_location_path(project_id, region)
@@ -1777,6 +1779,7 @@ def delete_pipeline_job(
         warnings.warn(
             "This method is deprecated, please use `PipelineJobHook.delete_pipeline_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         client = self.get_pipeline_service_client(region)
         name = client.pipeline_job_path(project_id, region, pipeline_job)
@@ -1882,6 +1885,7 @@ def get_pipeline_job(
         warnings.warn(
             "This method is deprecated, please use `PipelineJobHook.get_pipeline_job` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         client = self.get_pipeline_service_client(region)
         name = client.pipeline_job_path(project_id, region, pipeline_job)
@@ -2038,6 +2042,7 @@ def list_pipeline_jobs(
         warnings.warn(
             "This method is deprecated, please use `PipelineJobHook.list_pipeline_jobs` method.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         client = self.get_pipeline_service_client(region)
         parent = client.common_location_path(project_id, region)
diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py
index 18bd065cb286d..deadf0d193604 100644
--- a/airflow/providers/google/cloud/operators/bigquery.py
+++ b/airflow/providers/google/cloud/operators/bigquery.py
@@ -1487,6 +1487,7 @@ def __init__(
             warnings.warn(
                 "`exists_ok` parameter is deprecated, please use `if_exists`",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             self.if_exists = IfExistAction.IGNORE if exists_ok else IfExistAction.LOG
         else:
@@ -1995,6 +1996,7 @@ def __init__(
             warnings.warn(
                 "`exists_ok` parameter is deprecated, please use `if_exists`",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             self.if_exists = IfExistAction.IGNORE if exists_ok else IfExistAction.LOG
         else:
diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py
index 6faea746e6fa6..306e0dc03d6e3 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -608,7 +608,7 @@ def __init__(
                 "You can use `airflow.dataproc.ClusterGenerator.generate_cluster` "
                 "method to obtain cluster object.",
                 AirflowProviderDeprecationWarning,
-                stacklevel=1,
+                stacklevel=2,
             )
             # Remove result of apply defaults
             if "params" in kwargs:
@@ -878,7 +878,7 @@ def __init__(
             f"The `{type(self).__name__}` operator is deprecated, "
             "please use `DataprocUpdateClusterOperator` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
     def _build_scale_cluster_data(self) -> dict:
@@ -1311,7 +1311,7 @@ def __init__(
             " `generate_job` method of `{cls}` to generate dictionary representing your job"
             " and use it with the new operator.".format(cls=type(self).__name__),
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         super().__init__(
@@ -1403,7 +1403,7 @@ def __init__(
             " `generate_job` method of `{cls}` to generate dictionary representing your job"
             " and use it with the new operator.".format(cls=type(self).__name__),
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         super().__init__(
@@ -1496,7 +1496,7 @@ def __init__(
             " `generate_job` method of `{cls}` to generate dictionary representing your job"
             " and use it with the new operator.".format(cls=type(self).__name__),
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         super().__init__(
@@ -1591,7 +1591,7 @@ def __init__(
             " `generate_job` method of `{cls}` to generate dictionary representing your job"
             " and use it with the new operator.".format(cls=type(self).__name__),
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         super().__init__(
@@ -1682,7 +1682,7 @@ def __init__(
             " `generate_job` method of `{cls}` to generate dictionary representing your job"
             " and use it with the new operator.".format(cls=type(self).__name__),
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         super().__init__(
@@ -1796,7 +1796,7 @@ def __init__(
             " `generate_job` method of `{cls}` to generate dictionary representing your job"
             " and use it with the new operator.".format(cls=type(self).__name__),
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         super().__init__(
diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/airflow/providers/google/cloud/operators/kubernetes_engine.py
index a634b325011ae..e5ca3c271b938 100644
--- a/airflow/providers/google/cloud/operators/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/operators/kubernetes_engine.py
@@ -325,7 +325,9 @@ def _alert_deprecated_body_fields(self) -> None:
         for deprecated_field, replacement in deprecated_body_fields_with_replacement:
             if self._body_field(deprecated_field):
                 warnings.warn(
-                    f"The body field '{deprecated_field}' is deprecated. Use '{replacement}' instead."
+                    f"The body field '{deprecated_field}' is deprecated. Use '{replacement}' instead.",
+                    AirflowProviderDeprecationWarning,
+                    stacklevel=2,
                 )
 
     def execute(self, context: Context) -> str:
@@ -513,7 +515,7 @@ def get_gke_config_file():
             "The `get_gke_config_file` method is deprecated, "
             "please use `fetch_cluster_info` instead to get the cluster info for connecting to it.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
     @cached_property
diff --git a/airflow/providers/google/cloud/sensors/bigquery.py b/airflow/providers/google/cloud/sensors/bigquery.py
index d1492fe2480f0..6579bf07e7822 100644
--- a/airflow/providers/google/cloud/sensors/bigquery.py
+++ b/airflow/providers/google/cloud/sensors/bigquery.py
@@ -305,6 +305,7 @@ def __init__(self, **kwargs):
             "Please use `BigQueryTableExistenceSensor` and "
             "set `deferrable` attribute to `True` instead",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         super().__init__(deferrable=True, **kwargs)
@@ -346,5 +347,6 @@ def __init__(self, **kwargs):
             "Please use `BigQueryTablePartitionExistenceSensor` and "
             "set `deferrable` attribute to `True` instead",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         super().__init__(deferrable=True, **kwargs)
diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/airflow/providers/google/cloud/sensors/gcs.py
index c5a80e2d55f68..707013ab05b80 100644
--- a/airflow/providers/google/cloud/sensors/gcs.py
+++ b/airflow/providers/google/cloud/sensors/gcs.py
@@ -169,6 +169,7 @@ def __init__(self, **kwargs: Any) -> None:
             "Class `GCSObjectExistenceAsyncSensor` is deprecated and will be removed in a future release. "
             "Please use `GCSObjectExistenceSensor` and set `deferrable` attribute to `True` instead",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         super().__init__(deferrable=True, **kwargs)
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py
index 4f4ffb0985438..e3720b3272e93 100644
--- a/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py
+++ b/airflow/providers/google/cloud/transfers/bigquery_to_mssql.py
@@ -64,6 +64,7 @@ def __init__(
             warnings.warn(
                 "The `mssql_table` parameter has been deprecated. Use `target_table_name` instead.",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
 
             if target_table_name is not None:
diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
index dda5f9b742803..166305f76629b 100644
--- a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
+++ b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
@@ -54,6 +54,7 @@ def __init__(
             warnings.warn(
                 "The `mysql_table` parameter has been deprecated. Use `target_table_name` instead.",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
 
             if target_table_name is not None:
diff --git a/airflow/providers/google/cloud/triggers/kubernetes_engine.py b/airflow/providers/google/cloud/triggers/kubernetes_engine.py
index d3da97b0cd793..da068dcfc3d02 100644
--- a/airflow/providers/google/cloud/triggers/kubernetes_engine.py
+++ b/airflow/providers/google/cloud/triggers/kubernetes_engine.py
@@ -101,6 +101,7 @@ def __init__(
             warnings.warn(
                 "`should_delete_pod` parameter is deprecated, please use `on_finish_action`",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             self.on_finish_action = (
                 OnFinishAction.DELETE_POD if should_delete_pod else OnFinishAction.KEEP_POD
diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/airflow/providers/google/marketing_platform/hooks/analytics.py
index ec98ec2829397..1bbbbee91b3c2 100644
--- a/airflow/providers/google/marketing_platform/hooks/analytics.py
+++ b/airflow/providers/google/marketing_platform/hooks/analytics.py
@@ -36,7 +36,7 @@ def __init__(self, api_version: str = "v3", *args, **kwargs):
             f"The `{type(self).__name__}` class is deprecated, please use "
             f"`GoogleAnalyticsAdminHook` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
         self.api_version = api_version
diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/airflow/providers/google/marketing_platform/operators/analytics.py
index d987b62591ed1..65e18d8bd52e5 100644
--- a/airflow/providers/google/marketing_platform/operators/analytics.py
+++ b/airflow/providers/google/marketing_platform/operators/analytics.py
@@ -80,7 +80,7 @@ def __init__(
             f"The `{type(self).__name__}` operator is deprecated, please use "
             f"`GoogleAnalyticsAdminListAccountsOperator` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
         super().__init__(**kwargs)
@@ -153,7 +153,7 @@ def __init__(
             f"The `{type(self).__name__}` operator is deprecated, please use "
             f"`GoogleAnalyticsAdminGetGoogleAdsLinkOperator` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
         self.account_id = account_id
@@ -228,7 +228,7 @@ def __init__(
             f"The `{type(self).__name__}` operator is deprecated, please use "
             f"`GoogleAnalyticsAdminListGoogleAdsLinksOperator` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
         self.account_id = account_id
@@ -307,7 +307,7 @@ def __init__(
             f"The `{type(self).__name__}` operator is deprecated, please use "
             f"`GoogleAnalyticsAdminCreateDataStreamOperator` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
         super().__init__(**kwargs)
         self.storage_bucket = storage_bucket
@@ -399,7 +399,7 @@ def __init__(
             f"The `{type(self).__name__}` operator is deprecated, please use "
             f"`GoogleAnalyticsAdminDeleteDataStreamOperator` instead.",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
         super().__init__(**kwargs)
diff --git a/airflow/providers/http/operators/http.py b/airflow/providers/http/operators/http.py
index 524de8c5850e2..47b40babd1bf5 100644
--- a/airflow/providers/http/operators/http.py
+++ b/airflow/providers/http/operators/http.py
@@ -349,5 +349,6 @@ def __init__(self, **kwargs: Any):
             "Class `SimpleHttpOperator` is deprecated and "
             "will be removed in a future release. Please use `HttpOperator` instead.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         super().__init__(**kwargs)
diff --git a/airflow/providers/microsoft/azure/hooks/adx.py b/airflow/providers/microsoft/azure/hooks/adx.py
index b06bd40faf90c..5cefc7c888e36 100644
--- a/airflow/providers/microsoft/azure/hooks/adx.py
+++ b/airflow/providers/microsoft/azure/hooks/adx.py
@@ -142,7 +142,9 @@ def warn_if_collison(key, backcompat_key):
                 warnings.warn(
                     f"Conflicting params `{key}` and `{backcompat_key}` found in extras for conn "
                     f"{self.conn_id}. Using value for `{key}`. Please ensure this is the correct value "
-                    f"and remove the backcompat key `{backcompat_key}`."
+                    f"and remove the backcompat key `{backcompat_key}`.",
+                    UserWarning,
+                    stacklevel=2,
                 )
 
         def get_required_param(name: str) -> str:
diff --git a/airflow/providers/microsoft/azure/operators/data_factory.py b/airflow/providers/microsoft/azure/operators/data_factory.py
index 2aac723f86c63..03d80c9f8d2c1 100644
--- a/airflow/providers/microsoft/azure/operators/data_factory.py
+++ b/airflow/providers/microsoft/azure/operators/data_factory.py
@@ -229,6 +229,8 @@ def execute(self, context: Context) -> None:
                 warnings.warn(
                     "Argument `wait_for_termination` is False and `deferrable` is True , hence "
                     "`deferrable` parameter doesn't have any effect",
+                    UserWarning,
+                    stacklevel=2,
                 )
 
     def execute_complete(self, context: Context, event: dict[str, str]) -> None:
diff --git a/airflow/providers/microsoft/azure/utils.py b/airflow/providers/microsoft/azure/utils.py
index 58669562f3464..a2f7cd3f98dcd 100644
--- a/airflow/providers/microsoft/azure/utils.py
+++ b/airflow/providers/microsoft/azure/utils.py
@@ -43,7 +43,9 @@ def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str):
             warnings.warn(
                 f"Conflicting params `{field_name}` and `{backcompat_key}` found in extras for conn "
                 f"{conn_id}. Using value for `{field_name}`. Please ensure this is the correct "
-                f"value and remove the backcompat key `{backcompat_key}`."
+                f"value and remove the backcompat key `{backcompat_key}`.",
+                UserWarning,
+                stacklevel=2,
             )
         ret = extras[field_name]
     elif backcompat_key in extras:
diff --git a/airflow/providers/pagerduty/hooks/pagerduty_events.py b/airflow/providers/pagerduty/hooks/pagerduty_events.py
index 438f0ebc7f993..cfa8df30bea67 100644
--- a/airflow/providers/pagerduty/hooks/pagerduty_events.py
+++ b/airflow/providers/pagerduty/hooks/pagerduty_events.py
@@ -125,7 +125,7 @@ def create_event(
             "This method will be deprecated. Please use the "
             "`PagerdutyEventsHook.send_event` to interact with the Events API",
             AirflowProviderDeprecationWarning,
-            stacklevel=1,
+            stacklevel=2,
         )
 
         data = PagerdutyEventsHook.prepare_event_data(
diff --git a/airflow/providers/slack/utils/__init__.py b/airflow/providers/slack/utils/__init__.py
index c35bf87c727b5..b20dfe3209a29 100644
--- a/airflow/providers/slack/utils/__init__.py
+++ b/airflow/providers/slack/utils/__init__.py
@@ -48,7 +48,9 @@ def get(self, field, default: Any = NOTSET):
             warnings.warn(
                 f"Conflicting params `{field}` and `{backcompat_key}` found in extras for conn "
                 f"{self.conn_id}. Using value for `{field}`. Please ensure this is the correct value "
-                f"and remove the backcompat key `{backcompat_key}`."
+                f"and remove the backcompat key `{backcompat_key}`.",
+                UserWarning,
+                stacklevel=2,
             )
             return self.extra[field]
         elif backcompat_key in self.extra and self.extra[backcompat_key] not in (None, ""):
diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/airflow/providers/snowflake/hooks/snowflake.py
index cd1dc871650b0..2e71675cda24e 100644
--- a/airflow/providers/snowflake/hooks/snowflake.py
+++ b/airflow/providers/snowflake/hooks/snowflake.py
@@ -170,7 +170,9 @@ def _get_field(self, extra_dict, field_name):
             warnings.warn(
                 f"Conflicting params `{field_name}` and `{backcompat_key}` found in extras. "
                 f"Using value for `{field_name}`. Please ensure this is the correct "
-                f"value and remove the backcompat key `{backcompat_key}`."
+                f"value and remove the backcompat key `{backcompat_key}`.",
+                UserWarning,
+                stacklevel=2,
             )
             return extra_dict[field_name] or None
         return extra_dict.get(backcompat_key) or None
diff --git a/airflow/providers/tableau/hooks/tableau.py b/airflow/providers/tableau/hooks/tableau.py
index d39d1f5b7beed..cd865aa4089a8 100644
--- a/airflow/providers/tableau/hooks/tableau.py
+++ b/airflow/providers/tableau/hooks/tableau.py
@@ -129,6 +129,7 @@ def _auth_via_token(self) -> Auth.contextmgr:
             "Authentication via personal access token is deprecated. "
             "Please, use the password authentication to avoid inconsistencies.",
             AirflowProviderDeprecationWarning,
+            stacklevel=2,
         )
         tableau_auth = PersonalAccessTokenAuth(
             token_name=self.conn.extra_dejson["token_name"],
diff --git a/airflow/providers/weaviate/operators/weaviate.py b/airflow/providers/weaviate/operators/weaviate.py
index d23d6f3cfa1fb..d4dadf261cc3a 100644
--- a/airflow/providers/weaviate/operators/weaviate.py
+++ b/airflow/providers/weaviate/operators/weaviate.py
@@ -81,6 +81,7 @@ def __init__(
                 "Passing 'input_json' to WeaviateIngestOperator is deprecated and"
                 " you should use 'input_data' instead",
                 AirflowProviderDeprecationWarning,
+                stacklevel=2,
             )
             self.input_data = input_json
         else:
diff --git a/airflow/providers/yandex/hooks/yandex.py b/airflow/providers/yandex/hooks/yandex.py
index 3838cde706997..02bf037ae5075 100644
--- a/airflow/providers/yandex/hooks/yandex.py
+++ b/airflow/providers/yandex/hooks/yandex.py
@@ -104,7 +104,12 @@ def provider_user_agent(cls) -> str | None:
                 )
             ).strip()
         except KeyError:
-            warnings.warn(f"Hook '{cls.hook_name}' info is not initialized in airflow.ProviderManager")
+            warnings.warn(
+                f"Hook '{cls.hook_name}' info is not initialized in airflow.ProviderManager",
+                UserWarning,
+                stacklevel=2,
+            )
+            return None
 
     @classmethod
diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/airflow/providers/yandex/operators/yandexcloud_dataproc.py
index de4ea6e9c2bac..0489a2fd80cc0 100644
--- a/airflow/providers/yandex/operators/yandexcloud_dataproc.py
+++ b/airflow/providers/yandex/operators/yandexcloud_dataproc.py
@@ -20,6 +20,7 @@
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Iterable, Sequence
 
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import BaseOperator
 from airflow.providers.yandex.hooks.yandexcloud_dataproc import DataprocHook
 
@@ -261,7 +262,11 @@ def _setup(self, context: Context) -> DataprocHook:
         if self.yandex_conn_id is None:
             xcom_yandex_conn_id = context["task_instance"].xcom_pull(key="yandexcloud_connection_id")
             if xcom_yandex_conn_id:
-                warnings.warn("Implicit pass of `yandex_conn_id` is deprecated, please pass it explicitly")
+                warnings.warn(
+                    "Implicit pass of `yandex_conn_id` is deprecated, please pass it explicitly",
+                    AirflowProviderDeprecationWarning,
+                    stacklevel=2,
+                )
                 self.yandex_conn_id = xcom_yandex_conn_id
 
         return DataprocHook(yandex_conn_id=self.yandex_conn_id)
diff --git a/tests/providers/amazon/conftest.py b/tests/providers/amazon/conftest.py
index 6c24b92de8a01..85daf0a241858 100644
--- a/tests/providers/amazon/conftest.py
+++ b/tests/providers/amazon/conftest.py
@@ -33,13 +33,13 @@ def botocore_version():
     try:
         version = importlib_metadata.version("botocore")
     except importlib_metadata.PackageNotFoundError:
-        warnings.warn("'botocore' package not found'", UserWarning)
+        warnings.warn("'botocore' package not found'", UserWarning, stacklevel=2)
         return None
 
     try:
        return tuple(map(int, version.split(".")[:3]))
    except Exception:
-        warnings.warn(f"Unable to parse botocore {version!r}", UserWarning)
+        warnings.warn(f"Unable to parse botocore {version!r}", UserWarning, stacklevel=2)
        return None
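
Note (not part of the patch): a minimal, standard-library-only sketch of the behaviour this change standardizes. The class name ProviderDeprecationWarning below is a hypothetical stand-in for airflow.exceptions.AirflowProviderDeprecationWarning so the snippet runs without Airflow installed. With stacklevel=2, the warning is attributed to the caller of the deprecated function rather than to the warnings.warn() line inside the provider code, so users can see which of their own lines triggered the deprecation.

import warnings


class ProviderDeprecationWarning(DeprecationWarning):
    """Stand-in for airflow.exceptions.AirflowProviderDeprecationWarning."""


def old_api() -> str:
    # stacklevel=2 makes the warning point at the caller of old_api(),
    # not at this warnings.warn() line.
    warnings.warn(
        "old_api() is deprecated, use new_api() instead.",
        ProviderDeprecationWarning,
        stacklevel=2,
    )
    return "result"


if __name__ == "__main__":
    with warnings.catch_warnings(record=True) as captured:
        warnings.simplefilter("always")
        old_api()  # the recorded warning reports this file and this line number
    w = captured[0]
    print(w.category.__name__, w.filename, w.lineno)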