
Commit 9549274: Upgrade black to 20.8b1 (#10818)
1 parent: 004e1d8

334 files changed (+4868, -1358 lines)


.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion

@@ -150,7 +150,7 @@ repos:
     hooks:
       - id: check-hooks-apply
   - repo: https://github.com/psf/black
-    rev: 19.10b0
+    rev: 20.8b1
     hooks:
       - id: black
         files: api_connexion/.*\.py|.*providers.*\.py
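
The version bump above drives most of the reformatting in the rest of this commit: the 20.8 series of black treats a pre-existing trailing comma inside brackets as a "magic trailing comma" and keeps such a call or collection exploded, one element per line, where 19.10b0 collapsed these constructs onto a single line whenever they fit. Because the hook is still restricted by the files: pattern, only api_connexion and providers modules are touched. A minimal sketch of the behavior, using a hypothetical call site rather than code from this commit:

# Hypothetical helper, not taken from the commit.
def describe(name, command):
    return {"name": name, "command": command}


# black 19.10b0 left this one-liner alone; 20.8b1 sees the trailing comma
# inside the parentheses and explodes the call, one argument per line.
container = describe("hello-world-container", ["echo", "hello", "world"],)

# What black 20.8b1 produces for the call above:
container = describe(
    "hello-world-container",
    ["echo", "hello", "world"],
)

# Without a trailing comma the call stays on one line as long as it fits
# the configured line length.
container = describe("hello-world-container", ["echo", "hello", "world"])

Dropping the trailing comma is therefore how a call site keeps the old compact form under the new version.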

airflow/api_connexion/endpoints/health_endpoint.py

Lines changed: 4 additions & 1 deletion

@@ -40,7 +40,10 @@ def get_health():

     payload = {
         "metadatabase": {"status": metadatabase_status},
-        "scheduler": {"status": scheduler_status, "latest_scheduler_heartbeat": latest_scheduler_heartbeat,},
+        "scheduler": {
+            "status": scheduler_status,
+            "latest_scheduler_heartbeat": latest_scheduler_heartbeat,
+        },
     }

     return health_schema.dump(payload)

airflow/api_connexion/endpoints/variable_endpoint.py

Lines changed: 6 additions & 1 deletion

@@ -64,7 +64,12 @@ def get_variables(session, limit: Optional[int], offset: Optional[int] = None) -
     if limit:
         query = query.limit(limit)
     variables = query.all()
-    return variable_collection_schema.dump({"variables": variables, "total_entries": total_entries,})
+    return variable_collection_schema.dump(
+        {
+            "variables": variables,
+            "total_entries": total_entries,
+        }
+    )


 @security.requires_authentication

airflow/api_connexion/schemas/error_schema.py

Lines changed: 3 additions & 1 deletion

@@ -33,7 +33,9 @@ class Meta:
     import_error_id = auto_field("id", dump_only=True)
     timestamp = auto_field(format="iso")
     filename = auto_field()
-    stack_trace = auto_field("stacktrace",)
+    stack_trace = auto_field(
+        "stacktrace",
+    )


 class ImportErrorCollection(NamedTuple):

airflow/providers/amazon/aws/example_dags/example_datasync_1.py

Lines changed: 3 additions & 1 deletion

@@ -57,7 +57,9 @@
 # [END howto_operator_datasync_1_1]

 with models.DAG(
-    "example_datasync_1_2", start_date=days_ago(1), schedule_interval=None,  # Override to match your needs
+    "example_datasync_1_2",
+    start_date=days_ago(1),
+    schedule_interval=None,  # Override to match your needs
 ) as dag:
     # [START howto_operator_datasync_1_2]
     datasync_task_2 = AWSDataSyncOperator(

airflow/providers/amazon/aws/example_dags/example_ecs_fargate.py

Lines changed: 6 additions & 1 deletion

@@ -56,7 +56,12 @@
     task_definition="hello-world",
     launch_type="FARGATE",
     overrides={
-        "containerOverrides": [{"name": "hello-world-container", "command": ["echo", "hello", "world"],},],
+        "containerOverrides": [
+            {
+                "name": "hello-world-container",
+                "command": ["echo", "hello", "world"],
+            },
+        ],
     },
     network_configuration={
         "awsvpcConfiguration": {

airflow/providers/amazon/aws/example_dags/example_s3_bucket.py

Lines changed: 9 additions & 3 deletions

@@ -31,7 +31,9 @@ def upload_keys():
     s3_hook = S3Hook()
     for i in range(0, 3):
         s3_hook.load_string(
-            string_data="input", key=f"path/data{i}", bucket_name=BUCKET_NAME,
+            string_data="input",
+            key=f"path/data{i}",
+            bucket_name=BUCKET_NAME,
         )


@@ -44,15 +46,19 @@ def upload_keys():
 ) as dag:

     create_bucket = S3CreateBucketOperator(
-        task_id='s3_bucket_dag_create', bucket_name=BUCKET_NAME, region_name='us-east-1',
+        task_id='s3_bucket_dag_create',
+        bucket_name=BUCKET_NAME,
+        region_name='us-east-1',
     )

     add_keys_to_bucket = PythonOperator(
         task_id="s3_bucket_dag_add_keys_to_bucket", python_callable=upload_keys
     )

     delete_bucket = S3DeleteBucketOperator(
-        task_id='s3_bucket_dag_delete', bucket_name=BUCKET_NAME, force_delete=True,
+        task_id='s3_bucket_dag_delete',
+        bucket_name=BUCKET_NAME,
+        force_delete=True,
     )

     create_bucket >> add_keys_to_bucket >> delete_bucket

airflow/providers/amazon/aws/hooks/base_aws.py

Lines changed: 8 additions & 3 deletions

@@ -71,7 +71,9 @@ def _create_basic_session(self, session_kwargs: Dict[str, Any]) -> boto3.session
             self.log.info("Retrieving region_name from Connection.extra_config['region_name']")
             region_name = self.extra_config["region_name"]
         self.log.info(
-            "Creating session with aws_access_key_id=%s region_name=%s", aws_access_key_id, region_name,
+            "Creating session with aws_access_key_id=%s region_name=%s",
+            aws_access_key_id,
+            region_name,
         )

         return boto3.session.Session(
@@ -161,7 +163,9 @@ def _assume_role(
             assume_role_kwargs["ExternalId"] = self.extra_config.get("external_id")
         role_session_name = f"Airflow_{self.conn.conn_id}"
         self.log.info(
-            "Doing sts_client.assume_role to role_arn=%s (role_session_name=%s)", role_arn, role_session_name,
+            "Doing sts_client.assume_role to role_arn=%s (role_session_name=%s)",
+            role_arn,
+            role_session_name,
         )
         return sts_client.assume_role(
             RoleArn=role_arn, RoleSessionName=role_session_name, **assume_role_kwargs
@@ -317,7 +321,8 @@ def _get_credentials(self, region_name):
         # http://boto3.readthedocs.io/en/latest/guide/configuration.html

         self.log.info(
-            "Creating session using boto3 credential strategy region_name=%s", region_name,
+            "Creating session using boto3 credential strategy region_name=%s",
+            region_name,
         )
         session = boto3.session.Session(region_name=region_name)
         return session, None

airflow/providers/amazon/aws/hooks/batch_client.py

Lines changed: 4 additions & 1 deletion

@@ -342,7 +342,10 @@ def poll_job_status(self, job_id: str, match_status: List[str]) -> bool:
             job = self.get_job_description(job_id)
             job_status = job.get("status")
             self.log.info(
-                "AWS Batch job (%s) check status (%s) in %s", job_id, job_status, match_status,
+                "AWS Batch job (%s) check status (%s) in %s",
+                job_id,
+                job_status,
+                match_status,
             )

             if job_status in match_status:

airflow/providers/amazon/aws/hooks/redshift.py

Lines changed: 2 additions & 1 deletion

@@ -116,6 +116,7 @@ def create_cluster_snapshot(self, snapshot_identifier: str, cluster_identifier:
         :type cluster_identifier: str
         """
         response = self.get_conn().create_cluster_snapshot(
-            SnapshotIdentifier=snapshot_identifier, ClusterIdentifier=cluster_identifier,
+            SnapshotIdentifier=snapshot_identifier,
+            ClusterIdentifier=cluster_identifier,
         )
         return response['Snapshot'] if response['Snapshot'] else None
