diff --git a/INSTALL b/INSTALL
index 975232bad7beb..f24cd8b2e09f5 100644
--- a/INSTALL
+++ b/INSTALL
@@ -281,11 +281,11 @@ apache.hdfs, apache.hive, apache.impala, apache.kafka, apache.kylin, apache.livy
apache.pinot, apache.spark, apprise, arangodb, asana, atlassian.jira, celery, cloudant,
cncf.kubernetes, cohere, common.io, common.sql, databricks, datadog, dbt.cloud, dingding, discord,
docker, elasticsearch, exasol, fab, facebook, ftp, github, google, grpc, hashicorp, http, imap,
-influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.psrp, microsoft.winrm, mongo,
-mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie, oracle, pagerduty,
-papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce, samba, segment,
-sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, tabular, telegram,
-teradata, trino, vertica, weaviate, yandex, zendesk
+influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.powerbi, microsoft.psrp,
+microsoft.winrm, mongo, mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie,
+oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce,
+samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, tabular,
+telegram, teradata, trino, vertica, weaviate, yandex, zendesk
# END PROVIDER EXTRAS HERE
diff --git a/airflow/providers/microsoft/powerbi/CHANGELOG.rst b/airflow/providers/microsoft/powerbi/CHANGELOG.rst
new file mode 100644
index 0000000000000..106592bd11775
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/CHANGELOG.rst
@@ -0,0 +1,16 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
diff --git a/airflow/providers/microsoft/powerbi/__init__.py b/airflow/providers/microsoft/powerbi/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/microsoft/powerbi/hooks/__init__.py b/airflow/providers/microsoft/powerbi/hooks/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/hooks/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/microsoft/powerbi/hooks/powerbi.py b/airflow/providers/microsoft/powerbi/hooks/powerbi.py
new file mode 100644
index 0000000000000..f2b12845f4810
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/hooks/powerbi.py
@@ -0,0 +1,322 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import time
+from enum import Enum
+from typing import Any, Callable
+
+import requests
+from azure.identity import ClientSecretCredential
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+
+
+class PowerBIDatasetRefreshFields(Enum):
+ """Power BI refresh dataset details."""
+
+ REQUEST_ID = "request_id"
+ STATUS = "status"
+ END_TIME = "end_time"
+ ERROR = "error"
+
+
+class PowerBIDatasetRefreshStatus:
+ """Power BI refresh dataset statuses."""
+
+ # If the completion state is unknown or a refresh is in progress.
+ IN_PROGRESS = "In Progress"
+ FAILED = "Failed"
+ COMPLETED = "Completed"
+ DISABLED = "Disabled"
+
+ TERMINAL_STATUSES = {FAILED, COMPLETED}
+
+
+class PowerBIDatasetRefreshException(AirflowException):
+ """An exception that indicates a dataset refresh failed to complete."""
+
+
+class PowerBIHook(BaseHook):
+ """
+ A hook to interact with Power BI.
+
+ :param powerbi_conn_id: Airflow Connection ID that contains the connection
+ information for the Power BI account used for authentication.
+ """
+
+ conn_type: str = "powerbi"
+ conn_name_attr: str = "powerbi_conn_id"
+ default_conn_name: str = "powerbi_default"
+ hook_name: str = "Power BI"
+
+ @classmethod
+ def get_connection_form_widgets(cls) -> dict[str, Any]:
+ """Return connection widgets to add to connection form."""
+ from flask_appbuilder.fieldwidgets import BS3TextFieldWidget
+ from flask_babel import lazy_gettext
+ from wtforms import StringField
+
+ return {
+ "tenantId": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()),
+ }
+
+ @classmethod
+ def get_ui_field_behaviour(cls) -> dict[str, Any]:
+ """Return custom field behaviour."""
+ return {
+ "hidden_fields": ["schema", "port", "host", "extra"],
+ "relabeling": {
+ "login": "Client ID",
+ "password": "Secret",
+ },
+ }
+
+ def __init__(
+ self,
+ *,
+ powerbi_conn_id: str = default_conn_name,
+ ):
+ self.conn_id = powerbi_conn_id
+ self._api_version = "v1.0"
+ self._base_url = "https://api.powerbi.com"
+ super().__init__()
+
+ def refresh_dataset(self, dataset_id: str, group_id: str) -> str:
+ """
+ Triggers a refresh for the specified dataset from the given group id.
+
+ :param dataset_id: The dataset id.
+ :param group_id: The workspace id.
+
+ :return: Request id of the dataset refresh request.
+ """
+ url = f"{self._base_url}/{self._api_version}/myorg"
+
+ # add the group id if it is specified
+ url += f"/groups/{group_id}"
+
+ # add the dataset key
+ url += f"/datasets/{dataset_id}/refreshes"
+
+ response = self._send_request("POST", url=url)
+
+ if response.ok:
+ request_id = response.headers["RequestId"]
+ return request_id
+
+        raise PowerBIDatasetRefreshException(
+            f"Failed to trigger dataset refresh. Status code: {response.status_code}"
+        )
+
+ def _get_token(self) -> str:
+ """Retrieve the access token used to authenticate against the API."""
+ conn = self.get_connection(self.conn_id)
+ extras = conn.extra_dejson
+ tenant = extras.get("tenantId", None)
+
+ if not conn.login or not conn.password:
+ raise ValueError("A Client ID and Secret is required to authenticate with Power BI.")
+
+ if not tenant:
+ raise ValueError("A Tenant ID is required when authenticating with Client ID and Secret.")
+
+ credential = ClientSecretCredential(
+ client_id=conn.login, client_secret=conn.password, tenant_id=tenant
+ )
+
+ resource = "https://analysis.windows.net/powerbi/api"
+
+ access_token = credential.get_token(f"{resource}/.default")
+
+ return access_token.token
+
+ def get_refresh_history(
+ self,
+ dataset_id: str,
+ group_id: str,
+ ) -> list[dict[str, str]]:
+ """
+ Retrieve the refresh history of the specified dataset from the given group ID.
+
+ :param dataset_id: The dataset ID.
+ :param group_id: The workspace ID.
+
+ :return: Dictionary containing all the refresh histories of the dataset.
+ """
+ url = f"{self._base_url}/{self._api_version}/myorg"
+
+ # add the group id
+ url += f"/groups/{group_id}"
+
+ # add the dataset id
+ url += f"/datasets/{dataset_id}/refreshes"
+
+ raw_response = self._send_request("GET", url=url)
+
+ if raw_response.ok:
+ response = raw_response.json()
+ refresh_histories = response.get("value")
+ return [self.raw_to_refresh_details(refresh_history) for refresh_history in refresh_histories]
+
+        raise PowerBIDatasetRefreshException(
+            f"Failed to retrieve refresh history. Status code: {raw_response.status_code}"
+        )
+
+ def raw_to_refresh_details(self, refresh_details: dict) -> dict[str, str]:
+ """
+ Convert raw refresh details into a dictionary containing required fields.
+
+ :param refresh_details: Raw object of refresh details.
+ """
+ return {
+ PowerBIDatasetRefreshFields.REQUEST_ID.value: str(refresh_details.get("requestId")),
+ PowerBIDatasetRefreshFields.STATUS.value: (
+                PowerBIDatasetRefreshStatus.IN_PROGRESS
+ if str(refresh_details.get("status")) == "Unknown"
+ else str(refresh_details.get("status"))
+ ),
+ PowerBIDatasetRefreshFields.END_TIME.value: str(refresh_details.get("endTime")),
+ PowerBIDatasetRefreshFields.ERROR.value: str(refresh_details.get("serviceExceptionJson")),
+ }
+
+ def get_latest_refresh_details(self, dataset_id: str, group_id: str) -> dict[str, str] | None:
+ """
+ Get the refresh details of the most recent dataset refresh in the refresh history of the data source.
+
+ :return: Dictionary containing refresh status and end time if refresh history exists, otherwise None.
+ """
+ history = self.get_refresh_history(dataset_id=dataset_id, group_id=group_id)
+
+ if len(history) == 0:
+ return None
+
+ refresh_details = history[0]
+ return refresh_details
+
+ def get_refresh_details_by_request_id(self, dataset_id: str, group_id: str, request_id) -> dict[str, str]:
+ """
+ Get the refresh details of the given request Id.
+
+ :param request_id: Request Id of the Dataset refresh.
+ """
+ refresh_histories = self.get_refresh_history(dataset_id=dataset_id, group_id=group_id)
+
+ if len(refresh_histories) == 0:
+ raise PowerBIDatasetRefreshException(
+ f"Unable to fetch the details of dataset refresh with Request Id: {request_id}"
+ )
+
+ request_ids = [
+ refresh_history.get(PowerBIDatasetRefreshFields.REQUEST_ID.value)
+ for refresh_history in refresh_histories
+ ]
+
+ if request_id not in request_ids:
+ raise PowerBIDatasetRefreshException(
+ f"Unable to fetch the details of dataset refresh with Request Id: {request_id}"
+ )
+
+ request_id_index = request_ids.index(request_id)
+ refresh_details = refresh_histories[request_id_index]
+
+ return refresh_details
+
+ def wait_for_dataset_refresh_status(
+ self,
+ *,
+ expected_status: str,
+ request_id: str,
+ dataset_id: str,
+ group_id: str,
+ check_interval: int = 60,
+ timeout: int = 60 * 60 * 24 * 7,
+ ) -> bool:
+ """
+ Wait until the dataset refresh of given request ID has reached the expected status.
+
+ :param expected_status: The desired status to check against a dataset refresh's current status.
+ :param request_id: Request id for the dataset refresh request.
+ :param check_interval: Time in seconds to check on a dataset refresh's status.
+ :param timeout: Time in seconds to wait for a dataset to reach a terminal status or the expected status.
+ :return: Boolean indicating if the dataset refresh has reached the ``expected_status`` before the timeout.
+ """
+ dataset_refresh_details = self.get_refresh_details_by_request_id(
+ dataset_id=dataset_id, group_id=group_id, request_id=request_id
+ )
+ dataset_refresh_status = dataset_refresh_details.get(PowerBIDatasetRefreshFields.STATUS.value)
+
+ start_time = time.monotonic()
+
+ while (
+ dataset_refresh_status not in PowerBIDatasetRefreshStatus.TERMINAL_STATUSES
+ and dataset_refresh_status != expected_status
+ ):
+ # Check if the dataset-refresh duration has exceeded the ``timeout`` configured.
+ if start_time + timeout < time.monotonic():
+ raise PowerBIDatasetRefreshException(
+ f"Dataset refresh has not reached a terminal status after {timeout} seconds"
+ )
+
+ time.sleep(check_interval)
+
+ dataset_refresh_details = self.get_refresh_details_by_request_id(
+ dataset_id=dataset_id, group_id=group_id, request_id=request_id
+ )
+ dataset_refresh_status = dataset_refresh_details.get(PowerBIDatasetRefreshFields.STATUS.value)
+
+ return dataset_refresh_status == expected_status
+
+ def trigger_dataset_refresh(self, *, dataset_id: str, group_id: str) -> str:
+ """
+ Triggers the Power BI dataset refresh.
+
+ :param dataset_id: The dataset ID.
+ :param group_id: The workspace ID.
+
+ :return: Request ID of the dataset refresh request.
+ """
+ # Start dataset refresh
+ self.log.info("Starting dataset refresh.")
+ request_id = self.refresh_dataset(dataset_id=dataset_id, group_id=group_id)
+
+ return request_id
+
+ def _send_request(self, request_type: str, url: str, **kwargs) -> requests.Response:
+ """
+ Send a request to the Power BI REST API.
+
+ :param request_type: The type of the request (GET, POST, PUT, etc.).
+ :param url: The URL against which the request needs to be made.
+ :param kwargs: Additional keyword arguments to be passed to the request function.
+ :return: The response object returned by the request.
+ :raises requests.HTTPError: If the request fails (e.g., non-2xx status code).
+ """
+ self.header: dict[str, str] = {}
+
+ request_funcs: dict[str, Callable[..., requests.Response]] = {
+ "GET": requests.get,
+ "POST": requests.post,
+ }
+
+ func: Callable[..., requests.Response] = request_funcs[request_type.upper()]
+
+ response = func(url=url, headers={"Authorization": f"Bearer {self._get_token()}"}, **kwargs)
+
+ return response
diff --git a/airflow/providers/microsoft/powerbi/operators/__init__.py b/airflow/providers/microsoft/powerbi/operators/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/operators/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/microsoft/powerbi/operators/powerbi.py b/airflow/providers/microsoft/powerbi/operators/powerbi.py
new file mode 100644
index 0000000000000..18574c9185724
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/operators/powerbi.py
@@ -0,0 +1,194 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator, BaseOperatorLink
+from airflow.providers.microsoft.powerbi.hooks.powerbi import (
+ PowerBIDatasetRefreshException,
+ PowerBIDatasetRefreshFields,
+ PowerBIDatasetRefreshStatus,
+ PowerBIHook,
+)
+
+if TYPE_CHECKING:
+ from airflow.models.taskinstancekey import TaskInstanceKey
+ from airflow.utils.context import Context
+
+
+class PowerBILink(BaseOperatorLink):
+ """Construct a link to monitor a dataset in Power BI."""
+
+ name = "Monitor PowerBI Dataset"
+
+ def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey):
+ url = (
+ f"https://app.powerbi.com" # type: ignore[attr-defined]
+ f"/groups/{operator.group_id}/datasets/{operator.dataset_id}" # type: ignore[attr-defined]
+ f"/details?experience=power-bi"
+ )
+
+ return url
+
+
+class PowerBIDatasetRefreshOperator(BaseOperator):
+ """
+ Refreshes a Power BI dataset.
+
+ By default the operator will wait until the refresh has completed before
+ exiting. The refresh status is checked every 60 seconds as a default. This
+ can be changed by specifying a new value for `check_interval`.
+
+ :param dataset_id: The dataset id.
+ :param group_id: The workspace id.
+ :param wait_for_termination: Wait until the pre-existing or current triggered refresh completes before exiting.
+ :param force_refresh: Force refresh if pre-existing refresh found.
+ :param powerbi_conn_id: Airflow Connection ID that contains the connection
+ information for the Power BI account used for authentication.
+ :param timeout: Time in seconds to wait for a dataset to reach a terminal status for non-asynchronous waits. Used only if ``wait_for_termination`` is True.
+ :param check_interval: Number of seconds to wait before rechecking the
+ refresh status.
+ """
+
+ template_fields: Sequence[str] = (
+ "dataset_id",
+ "group_id",
+ )
+ template_fields_renderers = {"parameters": "json"}
+
+ operator_extra_links = (PowerBILink(),)
+
+ def __init__(
+ self,
+ *, # Indicates all the following parameters must be specified using keyword arguments.
+ dataset_id: str,
+ group_id: str,
+ wait_for_termination: bool = True,
+ force_refresh: bool = False,
+ powerbi_conn_id: str = PowerBIHook.default_conn_name,
+ timeout: int = 60 * 60 * 24 * 7,
+ check_interval: int = 60,
+ **kwargs,
+ ) -> None:
+ super().__init__(**kwargs)
+ self.dataset_id = dataset_id
+ self.group_id = group_id
+ self.wait_for_termination = wait_for_termination
+ self.force_refresh = force_refresh
+ self.powerbi_conn_id = powerbi_conn_id
+ self.timeout = timeout
+ self.check_interval = check_interval
+
+ @cached_property
+ def hook(self) -> PowerBIHook:
+        """Create and return a PowerBIHook (cached)."""
+ return PowerBIHook(powerbi_conn_id=self.powerbi_conn_id)
+
+ def execute(self, context: Context):
+ """Refresh the Power BI Dataset."""
+ self.log.info("Check if a refresh is already in progress.")
+ refresh_details = self.hook.get_latest_refresh_details(
+ dataset_id=self.dataset_id, group_id=self.group_id
+ )
+
+ if (
+ refresh_details is None
+ or refresh_details.get(PowerBIDatasetRefreshFields.STATUS.value)
+ in PowerBIDatasetRefreshStatus.TERMINAL_STATUSES
+ ):
+ self.log.info("No pre-existing refresh found.")
+ request_id = self.hook.trigger_dataset_refresh(
+ dataset_id=self.dataset_id,
+ group_id=self.group_id,
+ )
+
+ if self.wait_for_termination:
+ self.log.info("Waiting for dataset refresh to terminate.")
+ if self.hook.wait_for_dataset_refresh_status(
+ request_id=request_id,
+ dataset_id=self.dataset_id,
+ group_id=self.group_id,
+ expected_status=PowerBIDatasetRefreshStatus.COMPLETED,
+ ):
+ self.log.info("Dataset refresh %s has completed successfully.", request_id)
+ else:
+ raise PowerBIDatasetRefreshException(
+ f"Dataset refresh {request_id} has failed or has been cancelled."
+ )
+ else:
+ if (
+ refresh_details.get(PowerBIDatasetRefreshFields.STATUS.value)
+ == PowerBIDatasetRefreshStatus.IN_PROGRESS
+ ):
+ request_id = str(refresh_details.get(PowerBIDatasetRefreshFields.REQUEST_ID.value))
+ self.log.info("Found pre-existing dataset refresh request: %s.", request_id)
+
+ if self.force_refresh or self.wait_for_termination:
+ self.log.info("Waiting for dataset refresh %s to terminate.", request_id)
+ if self.hook.wait_for_dataset_refresh_status(
+ request_id=request_id,
+ dataset_id=self.dataset_id,
+ group_id=self.group_id,
+ expected_status=PowerBIDatasetRefreshStatus.COMPLETED,
+ ):
+ self.log.info(
+ "Pre-existing dataset refresh %s has completed successfully.", request_id
+ )
+ else:
+ raise PowerBIDatasetRefreshException(
+                        f"Pre-existing dataset refresh {request_id} has failed or has been cancelled."
+ )
+
+ if self.force_refresh:
+ self.log.info("Starting forced refresh.")
+ request_id = self.hook.trigger_dataset_refresh(
+ dataset_id=self.dataset_id,
+ group_id=self.group_id,
+ )
+
+ if self.wait_for_termination:
+ self.log.info("Waiting for dataset refresh to terminate.")
+ if self.hook.wait_for_dataset_refresh_status(
+ request_id=request_id,
+ dataset_id=self.dataset_id,
+ group_id=self.group_id,
+ expected_status=PowerBIDatasetRefreshStatus.COMPLETED,
+ ):
+ self.log.info("Dataset refresh %s has completed successfully.", request_id)
+ else:
+ raise PowerBIDatasetRefreshException(
+ f"Dataset refresh {request_id} has failed or has been cancelled."
+ )
+
+ # Retrieve refresh details after triggering refresh
+ refresh_details = self.hook.get_refresh_details_by_request_id(
+ dataset_id=self.dataset_id, group_id=self.group_id, request_id=request_id
+ )
+
+ request_id = str(refresh_details.get(PowerBIDatasetRefreshFields.REQUEST_ID.value))
+ status = str(refresh_details.get(PowerBIDatasetRefreshFields.STATUS.value))
+ end_time = str(refresh_details.get(PowerBIDatasetRefreshFields.END_TIME.value))
+ error = str(refresh_details.get(PowerBIDatasetRefreshFields.ERROR.value))
+
+ # Xcom Integration
+ context["ti"].xcom_push(key="powerbi_dataset_refresh_id", value=request_id)
+ context["ti"].xcom_push(key="powerbi_dataset_refresh_status", value=status)
+ context["ti"].xcom_push(key="powerbi_dataset_refresh_end_time", value=end_time)
+ context["ti"].xcom_push(key="powerbi_dataset_refresh_error", value=error)
diff --git a/airflow/providers/microsoft/powerbi/provider.yaml b/airflow/providers/microsoft/powerbi/provider.yaml
new file mode 100644
index 0000000000000..3b2dd2563cd56
--- /dev/null
+++ b/airflow/providers/microsoft/powerbi/provider.yaml
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+---
+package-name: apache-airflow-providers-microsoft-powerbi
+name: Microsoft Power BI
+description: |
+  `Microsoft Power BI <https://powerbi.microsoft.com/>`__
+
+state: not-ready
+source-date-epoch: 1707636422
+# note that those versions are maintained by release manager - do not update them manually
+versions:
+ - 1.0.0
+
+dependencies:
+ - apache-airflow>=2.7.0
+ - azure-identity>=1.3.1
+
+# integrations:
+# - integration-name: Microsoft Power BI
+
+operators:
+ - integration-name: Microsoft Power BI
+ python-modules:
+ - airflow.providers.microsoft.powerbi.operators.powerbi
+
+hooks:
+ - integration-name: Microsoft Power BI
+ python-modules:
+ - airflow.providers.microsoft.powerbi.hooks.powerbi
+
+connection-types:
+ - hook-class-name: airflow.providers.microsoft.powerbi.hooks.powerbi.PowerBIHook
+ connection-type: powerbi
+
+extra-links:
+ - airflow.providers.microsoft.powerbi.operators.powerbi.PowerBILink
diff --git a/contributing-docs/12_airflow_dependencies_and_extras.rst b/contributing-docs/12_airflow_dependencies_and_extras.rst
index 3303f4fafb75f..09f8300237122 100644
--- a/contributing-docs/12_airflow_dependencies_and_extras.rst
+++ b/contributing-docs/12_airflow_dependencies_and_extras.rst
@@ -183,11 +183,11 @@ apache.hdfs, apache.hive, apache.impala, apache.kafka, apache.kylin, apache.livy
apache.pinot, apache.spark, apprise, arangodb, asana, atlassian.jira, celery, cloudant,
cncf.kubernetes, cohere, common.io, common.sql, databricks, datadog, dbt.cloud, dingding, discord,
docker, elasticsearch, exasol, fab, facebook, ftp, github, google, grpc, hashicorp, http, imap,
-influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.psrp, microsoft.winrm, mongo,
-mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie, oracle, pagerduty,
-papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce, samba, segment,
-sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, tabular, telegram,
-teradata, trino, vertica, weaviate, yandex, zendesk
+influxdb, jdbc, jenkins, microsoft.azure, microsoft.mssql, microsoft.powerbi, microsoft.psrp,
+microsoft.winrm, mongo, mysql, neo4j, odbc, openai, openfaas, openlineage, opensearch, opsgenie,
+oracle, pagerduty, papermill, pgvector, pinecone, postgres, presto, qdrant, redis, salesforce,
+samba, segment, sendgrid, sftp, singularity, slack, smtp, snowflake, sqlite, ssh, tableau, tabular,
+telegram, teradata, trino, vertica, weaviate, yandex, zendesk
.. END PROVIDER EXTRAS HERE
diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg
index 1769101730720..7efcc7a646dee 100644
--- a/dev/breeze/doc/images/output_build-docs.svg
+++ b/dev/breeze/doc/images/output_build-docs.svg
@@ -195,10 +195,10 @@
atlassian.jira | celery | cloudant | cncf.kubernetes | cohere | common.io | common.sql | databricks | datadog | dbt.cloud | dingding | discord | docker | docker-stack | elasticsearch | exasol | fab | facebook | ftp | github | google | grpc | hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | tableau | tabular |
-telegram | teradata | trino | vertica | weaviate | yandex | zendesk]...
+microsoft.powerbi | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage
+| opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | tableau |
+tabular | telegram | teradata | trino | vertica | weaviate | yandex | zendesk]... Build documents.
diff --git a/dev/breeze/doc/images/output_build-docs.txt b/dev/breeze/doc/images/output_build-docs.txt
index a9ecb4c194bf5..7f7b3633e8876 100644
--- a/dev/breeze/doc/images/output_build-docs.txt
+++ b/dev/breeze/doc/images/output_build-docs.txt
@@ -1 +1 @@
-7391d7b5a523f63bb02bea9ca23216dd
+86440122387e7067d8f581221704bb88
diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.svg b/dev/breeze/doc/images/output_release-management_add-back-references.svg
index 48a3be2224e81..e1b632369bdf6 100644
--- a/dev/breeze/doc/images/output_release-management_add-back-references.svg
+++ b/dev/breeze/doc/images/output_release-management_add-back-references.svg
@@ -143,10 +143,10 @@
atlassian.jira | celery | cloudant | cncf.kubernetes | cohere | common.io | common.sql | databricks | datadog | dbt.cloud | dingding | discord | docker | docker-stack | elasticsearch | exasol | fab | facebook | ftp | github | google | grpc | hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | tableau | tabular |
-telegram | teradata | trino | vertica | weaviate | yandex | zendesk]...
+microsoft.powerbi | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage
+| opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | tableau |
+tabular | telegram | teradata | trino | vertica | weaviate | yandex | zendesk]... Command to add back references for documentation to make it backward compatible.
diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.txt b/dev/breeze/doc/images/output_release-management_add-back-references.txt
index ae51a4106f10a..241527f1b9951 100644
--- a/dev/breeze/doc/images/output_release-management_add-back-references.txt
+++ b/dev/breeze/doc/images/output_release-management_add-back-references.txt
@@ -1 +1 @@
-6cccd29cb919026e925f9c54882c4900
+53a7e2f9f3f1607e94bda5524c8a4db4
diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.svg b/dev/breeze/doc/images/output_release-management_publish-docs.svg
index 55cca08f674cf..3b6844305f348 100644
--- a/dev/breeze/doc/images/output_release-management_publish-docs.svg
+++ b/dev/breeze/doc/images/output_release-management_publish-docs.svg
@@ -200,10 +200,10 @@
atlassian.jira | celery | cloudant | cncf.kubernetes | cohere | common.io | common.sql | databricks | datadog | dbt.cloud | dingding | discord | docker | docker-stack | elasticsearch | exasol | fab | facebook | ftp | github | google | grpc | hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | tableau | tabular |
-telegram | teradata | trino | vertica | weaviate | yandex | zendesk]...
+microsoft.powerbi | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage
+| opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | tableau |
+tabular | telegram | teradata | trino | vertica | weaviate | yandex | zendesk]... Command to publish generated documentation to airflow-site
diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.txt b/dev/breeze/doc/images/output_release-management_publish-docs.txt
index 9529819686ee9..a10735b8e08f8 100644
--- a/dev/breeze/doc/images/output_release-management_publish-docs.txt
+++ b/dev/breeze/doc/images/output_release-management_publish-docs.txt
@@ -1 +1 @@
-a6be6aad28ce6b74e0b3d075b03aabc4
+94ef151eeca2c452dea89eebf311b3b1
diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
index edf41b8a49b14..ce61c9a747fe1 100644
--- a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
+++ b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
@@ -1,4 +1,4 @@
-