
Commit

[AIRFLOW-7081] Remove env variables from GCP guide (#7755)
mik-laj committed Mar 18, 2020
1 parent 4e626be commit 73305c7
Showing 25 changed files with 29 additions and 660 deletions.
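
Every snippet deleted below follows the same pattern: the example DAGs read their configuration through os.environ.get (or getenv) with a fallback default, so each DAG file still parses when no environment variables are set. The commit strips the # [START ...]/# [END ...] marker comments that existed only so the docs could embed those blocks; in most files the variables themselves stay. A minimal sketch of the pattern, with hypothetical names:

    import os

    # Fallback defaults keep the DAG importable without any setup;
    # names here are illustrative, the real examples use GCP_PROJECT_ID etc.
    PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
    ZONE = os.environ.get("GCE_ZONE", "europe-west1-b")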
@@ -56,7 +56,6 @@
from airflow.providers.google.cloud.sensors.bigtable import BigtableTableReplicationCompletedSensor
from airflow.utils.dates import days_ago

# [START howto_operator_gcp_bigtable_args]
GCP_PROJECT_ID = getenv('GCP_PROJECT_ID', 'example-project')
CBT_INSTANCE_ID = getenv('CBT_INSTANCE_ID', 'some-instance-id')
CBT_INSTANCE_DISPLAY_NAME = getenv('CBT_INSTANCE_DISPLAY_NAME', 'Human-readable name')
@@ -69,7 +68,6 @@
CBT_CLUSTER_STORAGE_TYPE = getenv('CBT_CLUSTER_STORAGE_TYPE', '2')
CBT_TABLE_ID = getenv('CBT_TABLE_ID', 'some-table-id')
CBT_POKE_INTERVAL = getenv('CBT_POKE_INTERVAL', '60')
# [END howto_operator_gcp_bigtable_args]

default_args = {
'start_date': days_ago(1)
@@ -37,14 +37,10 @@
from airflow.providers.google.cloud.operators.cloud_build import CloudBuildCreateOperator
from airflow.utils import dates

# [START howto_operator_gcp_common_variables]
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
# [END howto_operator_gcp_common_variables]

# [START howto_operator_gcp_create_build_variables]
GCP_SOURCE_ARCHIVE_URL = os.environ.get("GCP_CLOUD_BUILD_ARCHIVE_URL", "gs://example-bucket/file")
GCP_SOURCE_REPOSITORY_NAME = os.environ.get("GCP_CLOUD_BUILD_REPOSITORY_NAME", "")
# [END howto_operator_gcp_create_build_variables]

GCP_SOURCE_ARCHIVE_URL_PARTS = urlparse(GCP_SOURCE_ARCHIVE_URL)
GCP_SOURCE_BUCKET_NAME = GCP_SOURCE_ARCHIVE_URL_PARTS.netloc
@@ -41,25 +41,19 @@
)
from airflow.utils.dates import days_ago

# [START howto_operator_cloudsql_arguments]
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
INSTANCE_NAME = os.environ.get('GCSQL_MYSQL_INSTANCE_NAME', 'test-mysql')
INSTANCE_NAME2 = os.environ.get('GCSQL_MYSQL_INSTANCE_NAME2', 'test-mysql2')
DB_NAME = os.environ.get('GCSQL_MYSQL_DATABASE_NAME', 'testdb')
# [END howto_operator_cloudsql_arguments]

# [START howto_operator_cloudsql_export_import_arguments]
EXPORT_URI = os.environ.get('GCSQL_MYSQL_EXPORT_URI', 'gs://bucketName/fileName')
IMPORT_URI = os.environ.get('GCSQL_MYSQL_IMPORT_URI', 'gs://bucketName/fileName')
# [END howto_operator_cloudsql_export_import_arguments]

# Bodies below represent Cloud SQL instance resources:
# https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances

# [START howto_operator_cloudsql_create_arguments]
FAILOVER_REPLICA_NAME = INSTANCE_NAME + "-failover-replica"
READ_REPLICA_NAME = INSTANCE_NAME + "-read-replica"
# [END howto_operator_cloudsql_create_arguments]

# [START howto_operator_cloudsql_create_body]
body = {
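
The body dict that starts above is truncated in this view; per the comment in the file, it represents a Cloud SQL instance resource from the Admin API v1beta4. A rough sketch of what such a body typically contains (field names from that API; the values are illustrative assumptions, not the example's actual content):

    # Minimal create-instance body, sketched; the example's real body is longer.
    body = {
        "name": INSTANCE_NAME,          # defined earlier in the file
        "region": "europe-west1",
        "databaseVersion": "MYSQL_5_7",
        "settings": {
            "tier": "db-n1-standard-1",
        },
    }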
@@ -45,8 +45,6 @@
from airflow.providers.google.cloud.operators.cloud_sql import CloudSQLExecuteQueryOperator
from airflow.utils.dates import days_ago

# [START howto_operator_cloudsql_query_arguments]

GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCP_REGION = os.environ.get('GCP_REGION', 'europe-west-1b')

@@ -89,7 +87,6 @@
'DROP TABLE TABLE_TEST2',
]

# [END howto_operator_cloudsql_query_arguments]
default_args = {
'start_date': days_ago(1)
}
@@ -60,7 +60,6 @@
)
from airflow.utils.dates import days_ago

# [START howto_operator_gcp_transfer_common_variables]
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCP_DESCRIPTION = os.environ.get('GCP_DESCRIPTION', 'description')
GCP_TRANSFER_TARGET_BUCKET = os.environ.get('GCP_TRANSFER_TARGET_BUCKET')
@@ -73,7 +72,6 @@
GCP_TRANSFER_SECOND_TARGET_BUCKET = os.environ.get(
'GCP_TRANSFER_SECOND_TARGET_BUCKET', 'gcp-transfer-second-target'
)
# [END howto_operator_gcp_transfer_common_variables]

# [START howto_operator_gcp_transfer_create_job_body_aws]
aws_to_gcs_transfer_body = {
13 changes: 6 additions & 7 deletions airflow/providers/google/cloud/example_dags/example_compute.py
@@ -48,12 +48,7 @@
'start_date': days_ago(1),
}

# [START howto_operator_gce_args_set_machine_type]
GCE_SHORT_MACHINE_TYPE_NAME = os.environ.get('GCE_SHORT_MACHINE_TYPE_NAME', 'n1-standard-1')
SET_MACHINE_TYPE_BODY = {
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
}
# [END howto_operator_gce_args_set_machine_type]


with models.DAG(
@@ -99,7 +94,9 @@
project_id=GCP_PROJECT_ID,
zone=GCE_ZONE,
resource_id=GCE_INSTANCE,
body=SET_MACHINE_TYPE_BODY,
body={
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
},
task_id='gcp_compute_set_machine_type'
)
# [END howto_operator_gce_set_machine_type]
@@ -108,7 +105,9 @@
gce_set_machine_type2 = ComputeEngineSetMachineTypeOperator(
zone=GCE_ZONE,
resource_id=GCE_INSTANCE,
body=SET_MACHINE_TYPE_BODY,
body={
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
},
task_id='gcp_compute_set_machine_type2'
)
# [END howto_operator_gce_set_machine_type_no_project_id]
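
SET_MACHINE_TYPE_BODY presumably existed as a module-level constant mainly so the docs could quote it as a named snippet; with the markers gone it is inlined at both call sites. With the defaults shown (illustrative values), the inlined dict evaluates to:

    # e.g. for zone 'europe-west1-b' and machine type 'n1-standard-1':
    {'machineType': 'zones/europe-west1-b/machineTypes/n1-standard-1'}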
@@ -46,10 +46,8 @@
)
from airflow.utils.dates import days_ago

# [START howto_operator_compute_igm_common_args]
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCE_ZONE = os.environ.get('GCE_ZONE', 'europe-west1-b')
# [END howto_operator_compute_igm_common_args]

default_args = {
'start_date': days_ago(1)
@@ -48,11 +48,11 @@
overwrite_existing=True,
)

bucket = "{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}"
file = "{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}"

import_task = CloudDatastoreImportEntitiesOperator(
task_id="import_task", bucket=bucket, file=file, project_id=GCP_PROJECT_ID
task_id="import_task",
bucket="{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}",
file="{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}",
project_id=GCP_PROJECT_ID
)

export_task >> import_task
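
The two Jinja-templated arguments above pull the export operation's outputUrl out of the export task's XCom and split the gs:// URL into its bucket and object path. What those expressions compute, in plain Python (the URL here is an illustrative assumption; the real one arrives via XCom at runtime):

    output_url = "gs://example-bucket/path/to/export"
    parts = output_url.split("/")   # ['gs:', '', 'example-bucket', 'path', 'to', 'export']
    bucket = parts[2]               # 'example-bucket'
    file = "/".join(parts[3:])      # 'path/to/export'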
@@ -49,16 +49,13 @@
)
from airflow.utils import dates

# [START howto_operator_gcf_common_variables]
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCP_LOCATION = os.environ.get('GCP_LOCATION', 'europe-west1')
GCF_SHORT_FUNCTION_NAME = os.environ.get('GCF_SHORT_FUNCTION_NAME', 'hello').\
replace("-", "_") # make sure there are no dashes in function name (!)
FUNCTION_NAME = 'projects/{}/locations/{}/functions/{}'.format(GCP_PROJECT_ID,
GCP_LOCATION,
GCF_SHORT_FUNCTION_NAME)
# [END howto_operator_gcf_common_variables]
# [START howto_operator_gcf_deploy_variables]
GCF_SOURCE_ARCHIVE_URL = os.environ.get('GCF_SOURCE_ARCHIVE_URL', '')
GCF_SOURCE_UPLOAD_URL = os.environ.get('GCF_SOURCE_UPLOAD_URL', '')
GCF_SOURCE_REPOSITORY = os.environ.get(
@@ -69,7 +66,6 @@
GCF_ENTRYPOINT = os.environ.get('GCF_ENTRYPOINT', 'helloWorld')
GCF_RUNTIME = 'nodejs6'
GCP_VALIDATE_BODY = os.environ.get('GCP_VALIDATE_BODY', True)
# [END howto_operator_gcf_deploy_variables]

# [START howto_operator_gcf_deploy_body]
body = {
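
The deploy body that starts above is likewise truncated; it maps onto the Cloud Functions API function resource, assembled from the variables defined earlier in the file. A rough sketch of its typical shape (field names from the Cloud Functions v1 API; values illustrative, not the example's actual content):

    body = {
        "name": FUNCTION_NAME,
        "entryPoint": GCF_ENTRYPOINT,            # 'helloWorld' by default above
        "runtime": GCF_RUNTIME,                  # 'nodejs6'
        "sourceArchiveUrl": GCF_SOURCE_ARCHIVE_URL,
        "httpsTrigger": {},
    }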
2 changes: 0 additions & 2 deletions airflow/providers/google/cloud/example_dags/example_gcs.py
@@ -34,13 +34,11 @@

default_args = {"start_date": days_ago(1)}

# [START howto_operator_gcs_acl_args_common]
PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-id")
BUCKET_1 = os.environ.get("GCP_GCS_BUCKET_1", "test-gcs-example-bucket")
GCS_ACL_ENTITY = os.environ.get("GCS_ACL_ENTITY", "allUsers")
GCS_ACL_BUCKET_ROLE = "OWNER"
GCS_ACL_OBJECT_ROLE = "OWNER"
# [END howto_operator_gcs_acl_args_common]

BUCKET_2 = os.environ.get("GCP_GCS_BUCKET_1", "test-gcs-example-bucket-2")

@@ -42,7 +42,6 @@
)
from airflow.utils.dates import days_ago

# [START howto_operator_spanner_arguments]
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCP_SPANNER_INSTANCE_ID = os.environ.get('GCP_SPANNER_INSTANCE_ID', 'testinstance')
GCP_SPANNER_DATABASE_ID = os.environ.get('GCP_SPANNER_DATABASE_ID', 'testdatabase')
@@ -52,7 +51,6 @@
GCP_SPANNER_DISPLAY_NAME = os.environ.get('GCP_SPANNER_DISPLAY_NAME', 'Test Instance')
# OPERATION_ID should be unique per operation
OPERATION_ID = 'unique_operation_id'
# [END howto_operator_spanner_arguments]

default_args = {
'start_date': days_ago(1)
2 changes: 0 additions & 2 deletions airflow/providers/google/cloud/example_dags/example_speech.py
@@ -32,10 +32,8 @@
from airflow.providers.google.cloud.operators.translate_speech import GcpTranslateSpeechOperator
from airflow.utils import dates

# [START howto_operator_text_to_speech_env_variables]
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
BUCKET_NAME = os.environ.get("GCP_SPEECH_TEST_BUCKET", "gcp-speech-test-bucket")
# [END howto_operator_text_to_speech_env_variables]

# [START howto_operator_text_to_speech_gcp_filename]
FILENAME = "gcp-speech-test-file"
@@ -26,7 +26,6 @@
"""
import os

# [START howto_operator_vision_retry_import]
from google.api_core.retry import Retry

from airflow import models
@@ -37,9 +36,6 @@
)
from airflow.utils.dates import days_ago

# [END howto_operator_vision_retry_import]


default_args = {"start_date": days_ago(1)}

# [START howto_operator_video_intelligence_os_args]
13 changes: 0 additions & 13 deletions airflow/providers/google/cloud/example_dags/example_vision.py
@@ -65,26 +65,13 @@

default_args = {'start_date': days_ago(1)}

# [START howto_operator_vision_args_common]
GCP_VISION_LOCATION = os.environ.get('GCP_VISION_LOCATION', 'europe-west1')
# [END howto_operator_vision_args_common]

# [START howto_operator_vision_product_set_explicit_id]
GCP_VISION_PRODUCT_SET_ID = os.environ.get('GCP_VISION_PRODUCT_SET_ID', 'product_set_explicit_id')
# [END howto_operator_vision_product_set_explicit_id]

# [START howto_operator_vision_product_explicit_id]
GCP_VISION_PRODUCT_ID = os.environ.get('GCP_VISION_PRODUCT_ID', 'product_explicit_id')
# [END howto_operator_vision_product_explicit_id]

# [START howto_operator_vision_reference_image_args]
GCP_VISION_REFERENCE_IMAGE_ID = os.environ.get('GCP_VISION_REFERENCE_IMAGE_ID', 'reference_image_explicit_id')
GCP_VISION_REFERENCE_IMAGE_URL = os.environ.get('GCP_VISION_REFERENCE_IMAGE_URL', 'gs://bucket/image1.jpg')
# [END howto_operator_vision_reference_image_args]

# [START howto_operator_vision_annotate_image_url]
GCP_VISION_ANNOTATE_IMAGE_URL = os.environ.get('GCP_VISION_ANNOTATE_IMAGE_URL', 'gs://bucket/image2.jpg')
# [END howto_operator_vision_annotate_image_url]

# [START howto_operator_vision_product_set]
product_set = ProductSet(display_name='My Product Set')
10 changes: 0 additions & 10 deletions docs/howto/operator/gcp/bigtable.rst
@@ -30,16 +30,6 @@ Prerequisite Tasks
.. include:: _partials/prerequisite_tasks.rst


Environment variables
---------------------

All examples below rely on the following variables, which can be passed via environment variables.

.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_bigtable.py
:language: python
:start-after: [START howto_operator_gcp_bigtable_args]
:end-before: [END howto_operator_gcp_bigtable_args]

.. _howto/operator:BigtableCreateInstanceOperator:

BigtableCreateInstanceOperator
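
The removed exampleinclude block worked by slicing the Python source between a matching comment pair: :start-after: finds the # [START ...] line and :end-before: finds the # [END ...] line, and only the lines in between are rendered into the docs. That pairing is exactly what this commit deletes from the example DAGs, which is why the doc section has to go too. Sketched with a hypothetical marker name:

    # [START howto_operator_example_args]    <- ':start-after:' matches this line
    SOME_ID = os.environ.get('SOME_ID', 'default')
    # [END howto_operator_example_args]      <- ':end-before:' matches this line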
