
Commit d305876

Remove redundant None provided as default to dict.get() (#11448)
1 parent d8d13fa commit d305876
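
Every change in this commit is the same one-line cleanup: Python's dict.get(key) already returns None when the key is missing, so writing dict.get(key, None) is equivalent but noisier. A minimal sketch of the equivalence (the dags_hash mapping below is an illustrative stand-in, not code from the repository):

# dict.get() returns None by default when the key is absent,
# so passing None explicitly changes nothing.
dags_hash = {"example_dag": "a1b2c3"}

assert dags_hash.get("example_dag", None) == dags_hash.get("example_dag") == "a1b2c3"
assert dags_hash.get("missing_dag", None) is None
assert dags_hash.get("missing_dag") is None

# Only a non-None fallback needs to be spelled out.
assert dags_hash.get("missing_dag", "unknown") == "unknown"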

26 files changed: +45 −45 lines changed

airflow/api/common/experimental/trigger_dag.py

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@ def _trigger_dag(
             state=State.RUNNING,
             conf=run_conf,
             external_trigger=True,
-            dag_hash=dag_bag.dags_hash.get(dag_id, None),
+            dag_hash=dag_bag.dags_hash.get(dag_id),
         )

         triggers.append(trigger)

airflow/api_connexion/endpoints/connection_endpoint.py

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ def patch_connection(connection_id, session, update_mask=None):
             "Connection not found",
             detail=f"The Connection with connection_id: `{connection_id}` was not found",
         )
-    if data.get('conn_id', None) and connection.conn_id != data['conn_id']:
+    if data.get('conn_id') and connection.conn_id != data['conn_id']:
         raise BadRequest(detail="The connection_id cannot be updated.")
     if update_mask:
         update_mask = [i.strip() for i in update_mask]

airflow/api_connexion/endpoints/log_endpoint.py

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ def get_log(session, dag_id, dag_run_id, task_id, task_try_number, full_content=
     except BadSignature:
         raise BadRequest("Bad Signature. Please use only the tokens provided by the API.")

-    if metadata.get('download_logs', None) and metadata['download_logs']:
+    if metadata.get('download_logs') and metadata['download_logs']:
         full_content = True

     if full_content:

airflow/api_connexion/exceptions.py

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ def common_error_handler(exception):
     """
     if isinstance(exception, ProblemException):

-        link = EXCEPTIONS_LINK_MAP.get(exception.status, None)
+        link = EXCEPTIONS_LINK_MAP.get(exception.status)
         if link:
             response = problem(
                 status=exception.status,

airflow/api_connexion/security.py

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ def requires_access_decorator(func: T):
         def decorated(*args, **kwargs):

             check_authentication()
-            check_authorization(permissions, kwargs.get('dag_id', None))
+            check_authorization(permissions, kwargs.get('dag_id'))

             return func(*args, **kwargs)

airflow/jobs/scheduler_job.py

Lines changed: 1 addition & 1 deletion
@@ -1555,7 +1555,7 @@ def _create_dag_runs(self, dag_models: Iterable[DagModel], session: Session) ->
         """
         for dag_model in dag_models:
             dag = self.dagbag.get_dag(dag_model.dag_id, session=session)
-            dag_hash = self.dagbag.dags_hash.get(dag.dag_id, None)
+            dag_hash = self.dagbag.dags_hash.get(dag.dag_id)
             dag.create_dagrun(
                 run_type=DagRunType.SCHEDULED,
                 execution_date=dag_model.next_dagrun,

airflow/operators/python.py

Lines changed: 1 addition & 1 deletion
@@ -187,7 +187,7 @@ def __init__(
         multiple_outputs: bool = False,
         **kwargs
     ) -> None:
-        kwargs['task_id'] = self._get_unique_task_id(task_id, kwargs.get('dag', None))
+        kwargs['task_id'] = self._get_unique_task_id(task_id, kwargs.get('dag'))
         super().__init__(**kwargs)
         self.python_callable = python_callable

airflow/providers/amazon/aws/hooks/step_function.py

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ def start_execution(
         self.log.info('Executing Step Function State Machine: %s', state_machine_arn)

         response = self.conn.start_execution(**execution_args)
-        return response.get('executionArn', None)
+        return response.get('executionArn')

     def describe_execution(self, execution_arn: str) -> dict:
         """

airflow/providers/apache/spark/hooks/spark_submit.py

Lines changed: 3 additions & 3 deletions
@@ -209,9 +209,9 @@ def _resolve_connection(self) -> Dict[str, Any]:

             # Determine optional yarn queue from the extra field
             extra = conn.extra_dejson
-            conn_data['queue'] = extra.get('queue', None)
-            conn_data['deploy_mode'] = extra.get('deploy-mode', None)
-            conn_data['spark_home'] = extra.get('spark-home', None)
+            conn_data['queue'] = extra.get('queue')
+            conn_data['deploy_mode'] = extra.get('deploy-mode')
+            conn_data['spark_home'] = extra.get('spark-home')
             conn_data['spark_binary'] = self._spark_binary or extra.get('spark-binary', "spark-submit")
             conn_data['namespace'] = extra.get('namespace')
         except AirflowException:
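
Note that one nearby line keeps its explicit default on purpose: extra.get('spark-binary', "spark-submit") falls back to a non-None value, so it is not redundant and is left untouched by this cleanup.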

airflow/providers/google/cloud/operators/functions.py

Lines changed: 2 additions & 2 deletions
@@ -480,6 +480,6 @@ def execute(self, context: Dict):
             location=self.location,
             project_id=self.project_id,
         )
-        self.log.info('Function called successfully. Execution id %s', result.get('executionId', None))
-        self.xcom_push(context=context, key='execution_id', value=result.get('executionId', None))
+        self.log.info('Function called successfully. Execution id %s', result.get('executionId'))
+        self.xcom_push(context=context, key='execution_id', value=result.get('executionId'))
         return result
