Skip to content

Commit 886afaf

Browse files
authored
Add example dag and system test for LocalFilesystemToGCSOperator (#9043)
1 parent a779c4d commit 886afaf

File tree

5 files changed

+149
-3
lines changed

5 files changed

+149
-3
lines changed
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
#
2+
# Licensed to the Apache Software Foundation (ASF) under one
3+
# or more contributor license agreements. See the NOTICE file
4+
# distributed with this work for additional information
5+
# regarding copyright ownership. The ASF licenses this file
6+
# to you under the Apache License, Version 2.0 (the
7+
# "License"); you may not use this file except in compliance
8+
# with the License. You may obtain a copy of the License at
9+
#
10+
# http://www.apache.org/licenses/LICENSE-2.0
11+
#
12+
# Unless required by applicable law or agreed to in writing,
13+
# software distributed under the License is distributed on an
14+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
# KIND, either express or implied. See the License for the
16+
# specific language governing permissions and limitations
17+
# under the License.
18+
19+
"""Example DAG demonstrating LocalFilesystemToGCSOperator: upload one local file to a GCS bucket."""
import os

from airflow import models
from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator
from airflow.utils import dates

# [START howto_gcs_environment_variables]
# Configuration is taken from the environment so the same DAG works as a docs
# example and as a system test; the defaults are placeholders only.
BUCKET_NAME = os.environ.get('GCP_GCS_BUCKET', 'example-bucket-name')
PATH_TO_UPLOAD_FILE = os.environ.get('GCP_GCS_PATH_TO_UPLOAD_FILE', 'example-text.txt')
DESTINATION_FILE_LOCATION = os.environ.get('GCP_GCS_DESTINATION_FILE_LOCATION', 'example-text.txt')
# [END howto_gcs_environment_variables]

with models.DAG(
    'example_local_to_gcs',
    default_args={'start_date': dates.days_ago(1)},
    schedule_interval=None,  # triggered manually / by the system test, never on a schedule
    tags=['example'],
) as dag:
    # [START howto_operator_local_filesystem_to_gcs]
    upload_file = LocalFilesystemToGCSOperator(
        task_id="upload_file",
        src=PATH_TO_UPLOAD_FILE,
        dst=DESTINATION_FILE_LOCATION,
        bucket=BUCKET_NAME,
    )
    # [END howto_operator_local_filesystem_to_gcs]
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
.. Licensed to the Apache Software Foundation (ASF) under one
2+
or more contributor license agreements. See the NOTICE file
3+
distributed with this work for additional information
4+
regarding copyright ownership. The ASF licenses this file
5+
to you under the Apache License, Version 2.0 (the
6+
"License"); you may not use this file except in compliance
7+
with the License. You may obtain a copy of the License at
8+
9+
.. http://www.apache.org/licenses/LICENSE-2.0
10+
11+
.. Unless required by applicable law or agreed to in writing,
12+
software distributed under the License is distributed on an
13+
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14+
KIND, either express or implied. See the License for the
15+
specific language governing permissions and limitations
16+
under the License.
17+
18+
19+
Upload data from Local Filesystem to Google Cloud Storage
20+
=========================================================
21+
`Google Cloud Storage <https://cloud.google.com/storage/>`__ (GCS) is used to store large amounts of data from various applications.
22+
This page shows how to upload data from local filesystem to GCS.
23+
24+
.. contents::
25+
:depth: 1
26+
:local:
27+
28+
29+
Prerequisite Tasks
30+
^^^^^^^^^^^^^^^^^^
31+
32+
.. include:: _partials/prerequisite_tasks.rst
33+
34+
.. _howto/operator:LocalFilesystemToGCSOperator:
35+
36+
LocalFilesystemToGCSOperator
37+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
38+
39+
:class:`~airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator` allows you to upload
40+
data from local filesystem to GCS.
41+
42+
When you use this operator, you can optionally compress the data being uploaded.
43+
44+
Below is an example of using this operator to upload a file to GCS.
45+
46+
.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
47+
:language: python
48+
:dedent: 0
49+
:start-after: [START howto_operator_local_filesystem_to_gcs]
50+
:end-before: [END howto_operator_local_filesystem_to_gcs]
51+
52+
53+
Reference
54+
---------
55+
56+
For further information, look at:
57+
58+
* `Google Cloud Storage Documentation <https://cloud.google.com/storage/>`__

docs/operators-and-hooks-ref.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -918,7 +918,7 @@ These integrations allow you to copy data from/to Google Cloud Platform.
918918

919919
* - Local
920920
- `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
921-
-
921+
- :doc:`How to use <howto/operator/gcp/local_to_gcs>`
922922
- :mod:`airflow.providers.google.cloud.operators.local_to_gcs`
923923

924924
* - `Microsoft SQL Server (MSSQL) <https://www.microsoft.com/pl-pl/sql-server/sql-server-downloads>`__
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
#
2+
# Licensed to the Apache Software Foundation (ASF) under one
3+
# or more contributor license agreements. See the NOTICE file
4+
# distributed with this work for additional information
5+
# regarding copyright ownership. The ASF licenses this file
6+
# to you under the Apache License, Version 2.0 (the
7+
# "License"); you may not use this file except in compliance
8+
# with the License. You may obtain a copy of the License at
9+
#
10+
# http://www.apache.org/licenses/LICENSE-2.0
11+
#
12+
# Unless required by applicable law or agreed to in writing,
13+
# software distributed under the License is distributed on an
14+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
# KIND, either express or implied. See the License for the
16+
# specific language governing permissions and limitations
17+
# under the License.
18+
import os
19+
20+
import pytest
21+
22+
from airflow.providers.google.cloud.example_dags.example_local_to_gcs import BUCKET_NAME, PATH_TO_UPLOAD_FILE
23+
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY
24+
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
25+
26+
27+
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_GCS_KEY)
class LocalFilesystemToGCSOperatorExampleDagsTest(GoogleSystemTest):
    """System test that runs the ``example_local_to_gcs`` DAG against a real GCS project."""

    @provide_gcp_context(GCP_GCS_KEY)
    def setUp(self):
        # Provision the destination bucket and create the small local fixture
        # file that the example DAG uploads.
        super().setUp()
        self.create_gcs_bucket(BUCKET_NAME)
        with open(PATH_TO_UPLOAD_FILE, 'w+') as source_file:
            source_file.write('example test files')

    @provide_gcp_context(GCP_GCS_KEY)
    def tearDown(self):
        # Mirror setUp(): drop the bucket, then the local fixture file.
        self.delete_gcs_bucket(BUCKET_NAME)
        os.remove(PATH_TO_UPLOAD_FILE)
        super().tearDown()

    @provide_gcp_context(GCP_GCS_KEY)
    def test_run_example_dag(self):
        # Execute the example DAG end-to-end from the cloud example-DAGs folder.
        self.run_dag('example_local_to_gcs', CLOUD_DAG_FOLDER)

tests/test_project_structure.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,6 @@ class TestGoogleProviderProjectStructure(unittest.TestCase):
135135
('cloud', 'cassandra_to_gcs'),
136136
('cloud', 'mysql_to_gcs'),
137137
('cloud', 'mssql_to_gcs'),
138-
('cloud', 'local_to_gcs'),
139138
}
140139

141140
MISSING_DOC_GUIDES = {
@@ -150,7 +149,6 @@ class TestGoogleProviderProjectStructure(unittest.TestCase):
150149
'dlp',
151150
'gcs_to_bigquery',
152151
'kubernetes_engine',
153-
'local_to_gcs',
154152
'mlengine',
155153
'mssql_to_gcs',
156154
'mysql_to_gcs',

0 commit comments

Comments
 (0)