Skip to content

Commit bc3fc8c

Browse files
authored
Migrate Google azure_fileshare example DAG to new design AIP-47 (#24349)
related: #22430, #22447
1 parent 6ab02b6 commit bc3fc8c

File tree

3 files changed

+40
-91
lines changed

3 files changed

+40
-91
lines changed

docs/apache-airflow-providers-google/operators/transfer/azure_fileshare_to_gcs.rst

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -38,7 +38,7 @@ All parameters are described in the reference documentation - :class:`~airflow.p
3838

3939
An example operator call might look like this:
4040

41-
.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py
41+
.. exampleinclude:: /../../tests/system/providers/google/azure/example_azure_fileshare_to_gcs.py
4242
:language: python
4343
:dedent: 4
4444
:start-after: [START howto_operator_azure_fileshare_to_gcs_basic]

tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py

Lines changed: 0 additions & 85 deletions
This file was deleted.

airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py renamed to tests/system/providers/google/azure/example_azure_fileshare_to_gcs.py

Lines changed: 39 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -18,15 +18,20 @@
1818
from datetime import datetime, timedelta
1919

2020
from airflow import DAG
21+
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
2122
from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import AzureFileShareToGCSOperator
23+
from airflow.utils.trigger_rule import TriggerRule
2224

23-
DEST_GCS_BUCKET = os.environ.get('GCP_GCS_BUCKET', 'gs://INVALID BUCKET NAME')
25+
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
26+
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
27+
DAG_ID = 'azure_fileshare_to_gcs_example'
28+
29+
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
2430
AZURE_SHARE_NAME = os.environ.get('AZURE_SHARE_NAME', 'test-azure-share')
2531
AZURE_DIRECTORY_NAME = "test-azure-dir"
2632

27-
2833
with DAG(
29-
dag_id='azure_fileshare_to_gcs_example',
34+
dag_id=DAG_ID,
3035
default_args={
3136
'owner': 'airflow',
3237
'depends_on_past': False,
@@ -39,16 +44,45 @@
3944
schedule_interval='@once',
4045
start_date=datetime(2021, 1, 1),
4146
catchup=False,
42-
tags=['example'],
47+
tags=['example', 'azure'],
4348
) as dag:
49+
create_bucket = GCSCreateBucketOperator(
50+
task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
51+
)
52+
4453
# [START howto_operator_azure_fileshare_to_gcs_basic]
4554
sync_azure_files_with_gcs = AzureFileShareToGCSOperator(
4655
task_id='sync_azure_files_with_gcs',
4756
share_name=AZURE_SHARE_NAME,
48-
dest_gcs=DEST_GCS_BUCKET,
57+
dest_gcs=BUCKET_NAME,
4958
directory_name=AZURE_DIRECTORY_NAME,
5059
replace=False,
5160
gzip=True,
5261
google_impersonation_chain=None,
5362
)
5463
# [END howto_operator_azure_fileshare_to_gcs_basic]
64+
65+
delete_bucket = GCSDeleteBucketOperator(
66+
task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
67+
)
68+
69+
(
70+
# TEST SETUP
71+
create_bucket
72+
# TEST BODY
73+
>> sync_azure_files_with_gcs
74+
# TEST TEARDOWN
75+
>> delete_bucket
76+
)
77+
78+
from tests.system.utils.watcher import watcher
79+
80+
# This test needs watcher in order to properly mark success/failure
81+
# when "tearDown" task with trigger rule is part of the DAG
82+
list(dag.tasks) >> watcher()
83+
84+
85+
from tests.system.utils import get_test_run # noqa: E402
86+
87+
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
88+
test_run = get_test_run(dag)

0 commit comments

Comments (0)