4 changes: 2 additions & 2 deletions tests/system/providers/google/ads/example_ads.py
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator
from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
@@ -72,7 +72,7 @@
]
# [END howto_google_ads_env_variables]

-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
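Every file below receives the same two-line substitution: the indirect import (from airflow import models, or in a few files from airflow import DAG) is replaced by the direct from airflow.models.dag import DAG, and with models.DAG( becomes with DAG(. A minimal sketch of the resulting pattern, using a placeholder dag_id (the schedule and start_date values mirror the ones used throughout these examples):

from datetime import datetime

from airflow.models.dag import DAG  # direct import, replacing "from airflow import models"

with DAG(
    dag_id="example_dag",  # placeholder id, not taken from any file in this PR
    schedule="@once",
    start_date=datetime(2021, 1, 1),
) as dag:
    ...  # tasks go here; omitted in this sketch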
(next file)
@@ -25,7 +25,7 @@
from copy import deepcopy
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
    AutoMLCreateDatasetOperator,
@@ -72,7 +72,7 @@ def get_target_column_spec(columns_specs: list[dict], column_name: str) -> str:
    raise Exception(f"Unknown target column: {column_name}")


-with models.DAG(
+with DAG(
    dag_id=DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -27,7 +27,7 @@

from google.protobuf.struct_pb2 import Value

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
    AutoMLBatchPredictOperator,
@@ -110,7 +110,7 @@ def get_target_column_spec(columns_specs: list[dict], column_name: str) -> str:
    raise Exception(f"Unknown target column: {column_name}")


-with models.DAG(
+with DAG(
    dag_id=DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -27,7 +27,7 @@
from google.cloud.aiplatform import schema
from google.protobuf.struct_pb2 import Value

-from airflow import models
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.gcs import (
@@ -75,7 +75,7 @@
extract_object_id = CloudAutoMLHook.extract_object_id

# Example DAG for AutoML Natural Language Text Classification
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -27,7 +27,7 @@
from google.cloud.aiplatform import schema
from google.protobuf.struct_pb2 import Value

-from airflow import models
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.gcs import (
@@ -74,7 +74,7 @@
extract_object_id = CloudAutoMLHook.extract_object_id

# Example DAG for AutoML Natural Language Entities Extraction
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",  # Override to match your needs
    start_date=datetime(2021, 1, 1),
(next file)
@@ -27,7 +27,7 @@
from google.cloud.aiplatform import schema
from google.protobuf.struct_pb2 import Value

-from airflow import models
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.gcs import (
@@ -75,7 +75,7 @@
extract_object_id = CloudAutoMLHook.extract_object_id

# Example DAG for AutoML Natural Language Text Sentiment
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -26,8 +26,8 @@

from google.cloud import storage

-from airflow import models
from airflow.decorators import task
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
@@ -74,7 +74,7 @@


# Example DAG for AutoML Translation
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -24,7 +24,7 @@
from datetime import datetime
from typing import cast

-from airflow import models
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
@@ -68,7 +68,7 @@


# Example DAG for AutoML Video Intelligence Classification
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -24,7 +24,7 @@
from datetime import datetime
from typing import cast

-from airflow import models
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
@@ -68,7 +68,7 @@


# Example DAG for AutoML Video Intelligence Object Tracking
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -26,8 +26,8 @@

from google.cloud import storage

-from airflow import models
from airflow.decorators import task
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
@@ -73,7 +73,7 @@
extract_object_id = CloudAutoMLHook.extract_object_id

# Example DAG for AutoML Vision Classification
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",  # Override to match your needs
    start_date=datetime(2021, 1, 1),
(next file)
@@ -24,7 +24,7 @@
from datetime import datetime
from typing import cast

-from airflow import models
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
@@ -67,7 +67,7 @@


# Example DAG for AutoML Vision Object Detection
-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",  # Override to match your needs
    start_date=datetime(2021, 1, 1),
(next file)
@@ -20,7 +20,7 @@
import os
from datetime import datetime

-from airflow import DAG
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.transfers.azure_blob_to_gcs import AzureBlobStorageToGCSOperator
from airflow.providers.microsoft.azure.sensors.wasb import (
    WasbBlobSensor,
(next file)
@@ -19,7 +19,7 @@
import os
from datetime import datetime, timedelta

-from airflow import DAG
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import AzureFileShareToGCSOperator
from airflow.utils.trigger_rule import TriggerRule
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
@@ -39,7 +39,7 @@
DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -26,8 +26,8 @@
from pathlib import Path
from typing import cast

-from airflow import models
from airflow.models.baseoperator import chain
+from airflow.models.dag import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
@@ -81,7 +81,7 @@

# [END howto_bigquery_dts_create_args]

-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -24,7 +24,7 @@
from datetime import datetime
from pathlib import Path

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateExternalTableOperator,
@@ -43,7 +43,7 @@
CSV_FILE_LOCAL_PATH = str(Path(__file__).parent / "resources" / "us-states.csv")


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
@@ -38,7 +38,7 @@
DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCheckOperator,
@@ -68,7 +68,7 @@
)
# [END howto_operator_bigquery_query]

-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime, timedelta

-from airflow import DAG
+from airflow.models.dag import DAG
from airflow.operators.bash import BashOperator
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCheckOperator,
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
@@ -56,7 +56,7 @@
]


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -25,7 +25,7 @@
from datetime import datetime
from pathlib import Path

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
@@ -52,7 +52,7 @@
GCS_PATH_TO_SCHEMA_JSON = f"gs://{BUCKET_NAME}/{SCHEMA_JSON_DESTINATION}"


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
@@ -41,7 +41,7 @@
LOCATION = "US"


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
(next file)
@@ -23,7 +23,7 @@
import os
from datetime import datetime

-from airflow import models
+from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import (
    BigQueryCreateEmptyDatasetOperator,
    BigQueryCreateEmptyTableOperator,
@@ -43,7 +43,7 @@
TABLE = "test"


-with models.DAG(
+with DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),