Add system test for gcs_to_bigquery (#8556)
joppevos committed May 4, 2020
1 parent d8cb0b5 commit 67caae0
Showing 4 changed files with 58 additions and 17 deletions.
37 changes: 24 additions & 13 deletions airflow/providers/google/cloud/example_dags/{example_gcs_to_bq.py → example_gcs_to_bigquery.py}
@@ -15,45 +15,56 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
 
 """
 Example DAG using GCSToBigQueryOperator.
 """
 
+import os
+
 from airflow import models
-from airflow.operators.bash import BashOperator
+from airflow.providers.google.cloud.operators.bigquery import (
+    BigQueryCreateEmptyDatasetOperator, BigQueryDeleteDatasetOperator,
+)
 from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.dates import days_ago
 
+DATASET_NAME = os.environ.get("GCP_DATASET_NAME", 'airflow_test')
+TABLE_NAME = os.environ.get("GCP_TABLE_NAME", 'gcs_to_bq_table')
+
 args = {
     'owner': 'airflow',
     'start_date': days_ago(2)
 }
 
 dag = models.DAG(
-    dag_id='example_gcs_to_bq_operator', default_args=args,
+    dag_id='example_gcs_to_bigquery_operator', default_args=args,
     schedule_interval=None, tags=['example'])
 
-create_test_dataset = BashOperator(
+create_test_dataset = BigQueryCreateEmptyDatasetOperator(
     task_id='create_airflow_test_dataset',
-    bash_command='bq mk airflow_test',
-    dag=dag)
+    dataset_id=DATASET_NAME,
+    dag=dag
+)
 
-# [START howto_operator_gcs_to_bq]
+# [START howto_operator_gcs_to_bigquery]
 load_csv = GCSToBigQueryOperator(
-    task_id='gcs_to_bq_example',
+    task_id='gcs_to_bigquery_example',
     bucket='cloud-samples-data',
     source_objects=['bigquery/us-states/us-states.csv'],
-    destination_project_dataset_table='airflow_test.gcs_to_bq_table',
+    destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",
     schema_fields=[
         {'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE'},
         {'name': 'post_abbr', 'type': 'STRING', 'mode': 'NULLABLE'},
     ],
     write_disposition='WRITE_TRUNCATE',
     dag=dag)
-# [END howto_operator_gcs_to_bq]
+# [END howto_operator_gcs_to_bigquery]
 
-delete_test_dataset = BashOperator(
+delete_test_dataset = BigQueryDeleteDatasetOperator(
     task_id='delete_airflow_test_dataset',
-    bash_command='bq rm -rf airflow_test',
-    dag=dag)
+    dataset_id=DATASET_NAME,
+    delete_contents=True,
+    dag=dag
+)
 
 create_test_dataset >> load_csv >> delete_test_dataset
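
Aside (not part of this commit): the refactor above replaces the `bq mk` / `bq rm -rf` shell calls with native BigQuery operators and parameterizes the dataset and table through environment variables. The sketch below shows the same load step in isolation; `autodetect=True` is an existing operator parameter offered here as an assumed alternative to spelling out `schema_fields`, and the DAG id is hypothetical.

# Editorial sketch, not part of commit 67caae0: the same GCS -> BigQuery
# load, letting BigQuery infer the schema instead of passing schema_fields.
import os

from airflow import models
from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
from airflow.utils.dates import days_ago

DATASET_NAME = os.environ.get("GCP_DATASET_NAME", 'airflow_test')
TABLE_NAME = os.environ.get("GCP_TABLE_NAME", 'gcs_to_bq_table')

with models.DAG(
        dag_id='example_gcs_to_bigquery_autodetect',  # hypothetical id
        start_date=days_ago(2),
        schedule_interval=None,
        tags=['example']) as dag:
    GCSToBigQueryOperator(
        task_id='gcs_to_bigquery_autodetect',
        bucket='cloud-samples-data',
        source_objects=['bigquery/us-states/us-states.csv'],
        destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",
        autodetect=True,  # let BigQuery infer column names/types from the CSV
        write_disposition='WRITE_TRUNCATE',
    )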
6 changes: 3 additions & 3 deletions docs/howto/operator/gcp/gcs.rst
@@ -38,10 +38,10 @@
 Use the
 :class:`~airflow.providers.google.cloud.operators.gcs_to_bigquery.GCSToBigQueryOperator`
 to execute a BigQuery load job.
 
-.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_gcs_to_bq.py
+.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
     :language: python
-    :start-after: [START howto_operator_gcs_to_bq]
-    :end-before: [END howto_operator_gcs_to_bq]
+    :start-after: [START howto_operator_gcs_to_bigquery]
+    :end-before: [END howto_operator_gcs_to_bigquery]
 
 .. _howto/operator:GCSBucketCreateAclEntryOperator:
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import pytest
+
+from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY
+from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
+
+
+@pytest.mark.backend("mysql", "postgres")
+@pytest.mark.credential_file(GCP_BIGQUERY_KEY)
+class TestGoogleCloudStorageToBigQueryExample(GoogleSystemTest):
+
+    @provide_gcp_context(GCP_BIGQUERY_KEY)
+    def test_run_example_dag_gcs_to_bigquery_operator(self):
+        self.run_dag('example_gcs_to_bigquery_operator', CLOUD_DAG_FOLDER)
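
Aside (not from this commit): `GoogleSystemTest.run_dag` executes the example DAG against real GCP resources. For a lighter local check that needs no credentials, a plain pytest sketch like the following could verify that the renamed DAG parses and carries the expected tasks; the DAG-folder path and test name are illustrative assumptions, and stock `DagBag` is used instead of the project's system-test helpers.

# Editorial sketch, not from this commit: a plain pytest check that the
# renamed example DAG still parses and has the expected task ids.
from airflow.models import DagBag


def test_example_gcs_to_bigquery_dag_integrity():
    dag_bag = DagBag(
        dag_folder='airflow/providers/google/cloud/example_dags',  # assumed path
        include_examples=False,
    )
    dag = dag_bag.get_dag('example_gcs_to_bigquery_operator')
    assert dag is not None, dag_bag.import_errors
    assert {'create_airflow_test_dataset',
            'gcs_to_bigquery_example',
            'delete_airflow_test_dataset'} <= {t.task_id for t in dag.tasks}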
1 change: 0 additions & 1 deletion tests/test_project_structure.py
@@ -130,7 +130,6 @@ def test_keep_missing_test_files_update(self):
 class TestGoogleProviderProjectStructure(unittest.TestCase):
     MISSING_EXAMPLE_DAGS = {
         ('cloud', 'text_to_speech'),
-        ('cloud', 'gcs_to_bigquery'),
         ('cloud', 'adls_to_gcs'),
         ('cloud', 'sql_to_gcs'),
         ('cloud', 's3_to_gcs'),