2 files changed: +14, -4 lines
  airflow/providers/google/cloud/operators
  tests/system/providers/google/cloud/dataproc

File: airflow/providers/google/cloud/operators
@@ -1096,6 +1096,10 @@ def on_kill(self) -> None:
 class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
     """Start a Pig query Job on a Cloud DataProc cluster.

+    .. seealso::
+        This operator is deprecated, please use
+        :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`:
+
     The parameters of the operation will be passed to the cluster.

     It's a good practice to define dataproc_* parameters in the default_args of the dag
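For reference, a minimal sketch of what a migration to DataprocSubmitJobOperator could look like. This is not part of the diff; the project, region, cluster name, and query below are placeholder values chosen for illustration only.

from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

# Placeholder values, not taken from this change.
PROJECT_ID = "my-gcp-project"
REGION = "europe-west1"
CLUSTER_NAME = "cluster-dataproc-pig"

# With the generic operator, the Pig job is described as a plain dict.
PIG_JOB = {
    "reference": {"project_id": PROJECT_ID},
    "placement": {"cluster_name": CLUSTER_NAME},
    "pig_job": {"query_list": {"queries": ["define sin HiveUDF('sin');"]}},
}

pig_task = DataprocSubmitJobOperator(
    task_id="pig_task",
    job=PIG_JOB,
    region=REGION,
    project_id=PROJECT_ID,
)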
File: tests/system/providers/google/cloud/dataproc
 )
 from airflow.utils.trigger_rule import TriggerRule

-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "dataproc_pig"
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")

 CLUSTER_NAME = f"cluster-dataproc-pig-{ENV_ID}"
...

 # Cluster definition
-
 CLUSTER_CONFIG = {
     "master_config": {
         "num_instances": 1,
...
     schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
-    tags=["example", "dataproc"],
+    tags=["example", "dataproc", "pig"],
 ) as dag:
     create_cluster = DataprocCreateClusterOperator(
         task_id="create_cluster",
...
         trigger_rule=TriggerRule.ALL_DONE,
     )

-    create_cluster >> pig_task >> delete_cluster
+    (
+        # TEST SETUP
+        create_cluster
+        # TEST BODY
+        >> pig_task
+        # TEST TEARDOWN
+        >> delete_cluster
+    )

     from tests.system.utils.watcher import watcher
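In these system test files the watcher import is usually followed by boilerplate that wires every task into the watcher (so a failing ALL_DONE teardown still fails the test run) and exposes the DAG to pytest. That part is not shown in this diff; the sketch below assumes the standard tests.system.utils helpers used elsewhere in the repository.

    # Wire all tasks into the watcher so a failed teardown task
    # (trigger_rule=ALL_DONE) still marks the whole test run as failed.
    list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see tests/system/README.md).
test_run = get_test_run(dag)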