Skip to content

Commit

Permalink
feat: Onboard Open Targets Platform dataset (#313)
Browse files Browse the repository at this point in the history
  • Loading branch information
gkodukula committed Apr 29, 2022
1 parent 5cd13e7 commit c5adce6
Show file tree
Hide file tree
Showing 9 changed files with 423 additions and 0 deletions.
26 changes: 26 additions & 0 deletions datasets/open_targets/infra/open_targets_dataset.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# BigQuery dataset that receives the copied Open Targets Platform tables
# (populated by the copy_bq_datasets pipeline, not by Terraform itself).
resource "google_bigquery_dataset" "open_targets_platform" {
  dataset_id = "open_targets_platform"
  project = var.project_id
  description = "Open-Targets dataset"
}

# Expose the dataset ID so other modules/pipelines can reference it.
output "bigquery_dataset-open_targets_platform-dataset_id" {
  value = google_bigquery_dataset.open_targets_platform.dataset_id
}
28 changes: 28 additions & 0 deletions datasets/open_targets/infra/provider.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# Google provider configuration: all resources are created in var.project_id /
# var.region, authenticating by impersonating the given service account.
provider "google" {
  project = var.project_id
  impersonate_service_account = var.impersonating_acct
  region = var.region
}

# Identity of the credentials actually in use — useful to confirm that
# impersonation took effect.
data "google_client_openid_userinfo" "me" {}

output "impersonating-account" {
  value = data.google_client_openid_userinfo.me.email
}
26 changes: 26 additions & 0 deletions datasets/open_targets/infra/variables.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
/**
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/


# Input variables for the open_targets infra module. Descriptions make the
# module self-documenting (`terraform-docs`, `terraform plan` prompts) without
# changing any behavior.

variable "project_id" {
  description = "Google Cloud project ID that hosts the BigQuery dataset."
}

variable "bucket_name_prefix" {
  # NOTE(review): not referenced by the resources visible in this module;
  # presumably kept for interface consistency with sibling dataset modules.
  description = "Prefix applied to GCS bucket names."
}

variable "impersonating_acct" {
  description = "Service account Terraform impersonates when provisioning."
}

variable "region" {
  description = "Default region for provisioned resources."
}

variable "env" {
  description = "Deployment environment (e.g. dev, prod)."
}

variable "iam_policies" {
  description = "Optional IAM policies to attach to this dataset's resources."
  default     = {}
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

FROM python:3.8

# Emit logs immediately instead of buffering — important for container logs.
ENV PYTHONUNBUFFERED True

# Set the working directory before copying anything so requirements.txt and
# script.py end up in the same place (the original copied requirements.txt
# into / but the script into /custom).
WORKDIR /custom

# Copy and install dependencies in their own layer so Docker's cache skips
# reinstalling them when only script.py changes.
COPY requirements.txt ./
RUN python3 -m pip install --no-cache-dir -r requirements.txt

COPY ./script.py .

CMD ["python3", "script.py"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
google-api-core
google-cloud-bigquery-datatransfer
protobuf
182 changes: 182 additions & 0 deletions datasets/open_targets/pipelines/_images/copy_bq_datasets/script.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,182 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import operator
import os
import time
import typing

from google.api_core.exceptions import ResourceExhausted
from google.cloud import bigquery_datatransfer_v1
from google.protobuf.timestamp_pb2 import Timestamp

RETRY_DELAY = 10


# NOTE(review): this shadows the builtin TimeoutError (Python 3.3+); a
# distinct name would be clearer, but renaming could affect importers.
class TimeoutError(Exception):
    """Raised when the BQ transfer jobs haven't all finished within the allotted time."""


def main(
    source_project_id: str,
    target_project_id: str,
    service_account: str,
    transfer_config_name: str,
    source_dataset_name: str,
    target_dataset_name: str,
    timeout: int,
):
    """Copy a BigQuery dataset across projects via the Data Transfer Service.

    Looks in the target project for an existing transfer config whose display
    name is "<transfer_config_name>-copy-<source_dataset_name>"; creates one
    if absent. The config is then triggered manually and polled until it
    succeeds or *timeout* seconds elapse.
    """
    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    display_name = f"{transfer_config_name}-copy-{source_dataset_name}"

    all_configs = client.list_transfer_configs(
        request=bigquery_datatransfer_v1.types.ListTransferConfigsRequest(
            parent=f"projects/{target_project_id}"
        )
    )

    # Reuse the config with an exactly matching display name, if any.
    # (An exact match necessarily starts with the "-copy" prefix, so the
    # prefix filter the original code applied is implied.)
    transfer_config = None
    for candidate in all_configs:
        if candidate.display_name == display_name:
            transfer_config = candidate
            break

    if transfer_config is None:
        transfer_config = create_transfer_config(
            client,
            source_project_id,
            target_project_id,
            source_dataset_name,
            display_name,
            source_dataset_name,
            target_dataset_name,
            service_account,
        )

    trigger_config(client, transfer_config)
    wait_for_completion(client, [transfer_config], timeout)


def wait_for_completion(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    running_configs: typing.List[bigquery_datatransfer_v1.types.TransferConfig],
    timeout: int,
) -> None:
    """Poll the latest run of each config until every run has succeeded.

    Polls every RETRY_DELAY seconds.

    Raises:
        RuntimeError: if any run reaches a terminal failure state
            (FAILED/CANCELLED) — it can never succeed, so waiting out the
            full timeout (as the previous version did) is pointless.
        TimeoutError: if the runs haven't all succeeded within `timeout`
            seconds.
    """
    deadline = time.time() + timeout

    while True:
        states = [
            str(latest_transfer_run(client, config).state)
            for config in running_configs
        ]
        logging.info(f"States: {states}")

        # Mark as complete when all runs have succeeded.
        if all(state == "TransferState.SUCCEEDED" for state in states):
            return

        # Fail fast on terminal failure states instead of spinning until
        # the timeout expires.
        failed = [
            state
            for state in states
            if state in ("TransferState.FAILED", "TransferState.CANCELLED")
        ]
        if failed:
            raise RuntimeError(f"Transfer run(s) ended in terminal state: {failed}")

        # Stop the process when it's longer than the allotted time.
        if time.time() > deadline:
            raise TimeoutError

        time.sleep(RETRY_DELAY)


def latest_transfer_run(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    config: bigquery_datatransfer_v1.types.TransferConfig,
) -> bigquery_datatransfer_v1.types.TransferRun:
    """Return the run of *config* with the most recent run_time.

    Like max(), raises ValueError if the config has no runs at all.
    """
    runs = client.list_transfer_runs(parent=config.name)
    return max(runs, key=lambda run: run.run_time)


def create_transfer_config(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    source_project_id: str,
    target_project_id: str,
    dataset_id: str,
    display_name: str,
    source_dataset_name: str,
    target_dataset_name: str,
    service_account: str,
) -> bigquery_datatransfer_v1.types.TransferConfig:
    """Create a cross-region dataset-copy transfer config in the target project.

    Auto-scheduling is disabled; runs are started manually via trigger_config.

    NOTE(review): `source_dataset_name` is accepted but never read — the
    source dataset travels in `dataset_id`. Kept in the signature for
    compatibility with existing callers.
    """
    manual_only = bigquery_datatransfer_v1.ScheduleOptions(
        disable_auto_scheduling=True
    )
    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        destination_dataset_id=target_dataset_name,
        display_name=display_name,
        data_source_id="cross_region_copy",
        dataset_region="US",
        params={
            "source_project_id": source_project_id,
            "source_dataset_id": dataset_id,
        },
        schedule_options=manual_only,
    )

    create_request = bigquery_datatransfer_v1.types.CreateTransferConfigRequest(
        parent=client.common_project_path(target_project_id),
        transfer_config=transfer_config,
        service_account_name=service_account,
    )
    return client.create_transfer_config(request=create_request)


def trigger_config(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    config: bigquery_datatransfer_v1.types.TransferConfig,
) -> None:
    """Manually start a transfer run for *config*, requested at "now".

    A ResourceExhausted error indicates a run is already in flight for this
    config; it is logged and treated as success.
    """
    now = time.time()
    whole_seconds = int(now)
    fractional_nanos = int((now - whole_seconds) * 1_000_000_000)
    run_time = Timestamp(seconds=whole_seconds, nanos=fractional_nanos)

    request = bigquery_datatransfer_v1.types.StartManualTransferRunsRequest(
        parent=config.name,
        requested_run_time=run_time,
    )
    try:
        client.start_manual_transfer_runs(request=request)
    except ResourceExhausted:
        logging.info(
            f"Transfer job is currently running for config ({config.display_name}) {config.name}."
        )


if __name__ == "__main__":
    # Root logger at INFO so the polling state messages are visible in logs.
    logging.getLogger().setLevel(logging.INFO)

    env = os.environ
    main(
        source_project_id=env["SOURCE_PROJECT_ID"],
        target_project_id=env["TARGET_PROJECT_ID"],
        service_account=env["SERVICE_ACCOUNT"],
        transfer_config_name=env["TRANSFER_CONFIG_NAME"],
        source_dataset_name=env["SOURCE_DATASET_NAME"],
        target_dataset_name=env["TARGET_DATASET_NAME"],
        # Optional; defaults to 20 minutes.
        timeout=int(env.get("TIMEOUT", 1200)),
    )
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from airflow import DAG
from airflow.providers.cncf.kubernetes.operators import kubernetes_pod

# Shared defaults for every task in this DAG.
default_args = {
    "owner": "Google",
    "depends_on_past": False,
    "start_date": "2021-03-23",
}


# Monthly DAG that copies the Open Targets Platform BigQuery dataset from the
# source project into the target project, by running the copy_bq_datasets
# container image in the Composer cluster.
with DAG(
    dag_id="open_targets.copy_platform_data",
    default_args=default_args,
    max_active_runs=1,
    schedule_interval="@monthly",
    catchup=False,
    default_view="graph",
) as dag:

    # Transfer Open Targets Databases
    copy_bq_datasets = kubernetes_pod.KubernetesPodOperator(
        task_id="copy_bq_datasets",
        name="copy_bq_datasets",
        namespace="composer",
        service_account_name="datasets",
        image_pull_policy="Always",
        # Image location and all project/dataset parameters come from the
        # Airflow Variable `open_targets` (JSON), so nothing is hard-coded
        # per environment except the transfer-config name prefix below.
        image="{{ var.json.open_targets.container_registry.copy_bq_datasets }}",
        env_vars={
            "SOURCE_PROJECT_ID": "{{ var.json.open_targets.source_project_id }}",
            "TARGET_PROJECT_ID": "{{ var.json.open_targets.target_project_id }}",
            "SERVICE_ACCOUNT": "{{ var.json.open_targets.service_account }}",
            "TRANSFER_CONFIG_NAME": "open-targets",
            "SOURCE_DATASET_NAME": "{{ var.json.open_targets.platform.source_dataset_name }}",
            "TARGET_DATASET_NAME": "{{ var.json.open_targets.platform.target_dataset_name }}",
        },
        resources={
            "request_memory": "128M",
            "request_cpu": "200m",
            "request_ephemeral_storage": "5G",
        },
    )

    # Single-task DAG: referencing the task registers it; no dependencies.
    copy_bq_datasets

0 comments on commit c5adce6

Please sign in to comment.