
Commit f593161

feat: Onboard NASA wildfire (#275)
1 parent 16021b6 commit f593161

10 files changed: +616 additions, 0 deletions

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
/**
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


resource "google_bigquery_dataset" "nasa_wildfire" {
  dataset_id  = "nasa_wildfire"
  project     = var.project_id
  description = "Past Week dataset"
}

output "bigquery_dataset-nasa_wildfire-dataset_id" {
  value = google_bigquery_dataset.nasa_wildfire.dataset_id
}
Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
/**
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


resource "google_bigquery_table" "nasa_wildfire_past_week" {
  project     = var.project_id
  dataset_id  = "nasa_wildfire"
  table_id    = "past_week"
  description = "Past Week table"
  depends_on = [
    google_bigquery_dataset.nasa_wildfire
  ]
}

output "bigquery_table-nasa_wildfire_past_week-table_id" {
  value = google_bigquery_table.nasa_wildfire_past_week.table_id
}

output "bigquery_table-nasa_wildfire_past_week-id" {
  value = google_bigquery_table.nasa_wildfire_past_week.id
}
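
Once this dataset and table are provisioned and loaded, the past_week table can be queried with the standard BigQuery client. A minimal sketch, assuming a hypothetical project ID; google-cloud-bigquery is not among this commit's requirements and the example is for illustration only:

from google.cloud import bigquery

client = bigquery.Client(project="my-project")  # hypothetical project ID

# Count rows in the table created above (dataset_id and table_id from the Terraform).
query = "SELECT COUNT(*) AS detections FROM `my-project.nasa_wildfire.past_week`"
for row in client.query(query).result():
    print(row.detections)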
Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
/**
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


provider "google" {
  project                     = var.project_id
  impersonate_service_account = var.impersonating_acct
  region                      = var.region
}

data "google_client_openid_userinfo" "me" {}

output "impersonating-account" {
  value = data.google_client_openid_userinfo.me.email
}
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
/**
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


variable "project_id" {}
variable "bucket_name_prefix" {}
variable "impersonating_acct" {}
variable "region" {}
variable "env" {}
Lines changed: 37 additions & 0 deletions
@@ -0,0 +1,37 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The base image for this build
FROM python:3.8

# Allow statements and log messages to appear in Cloud logs
ENV PYTHONUNBUFFERED True

# Copy the requirements file into the image
COPY requirements.txt ./

# Install the packages specified in the requirements file
RUN python3 -m pip install --no-cache-dir -r requirements.txt

# The WORKDIR instruction sets the working directory for any RUN, CMD,
# ENTRYPOINT, COPY and ADD instructions that follow it in the Dockerfile.
# If the WORKDIR doesn't exist, it will be created even if it's not used in
# any subsequent Dockerfile instruction.
WORKDIR /custom

# Copy the specific data processing script/s in the image under /custom/*
COPY ./csv_transform.py .

# Command to run the data processing script when the container is run
CMD ["python3", "csv_transform.py"]
Lines changed: 146 additions & 0 deletions
@@ -0,0 +1,146 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import datetime
import json
import logging
import os
import pathlib
import typing

import pandas as pd
import requests
from google.cloud import storage


def main(
    source_url: str,
    source_file: pathlib.Path,
    target_file: pathlib.Path,
    target_gcs_bucket: str,
    target_gcs_path: str,
    headers: typing.List[str],
    rename_mappings: dict,
    pipeline_name: str,
) -> None:
    logging.info(
        f"NASA Wildfire {pipeline_name} process started at "
        + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    )
    download_file(source_url, source_file)

    logging.info("Reading file ...")
    df = pd.read_csv(str(source_file))

    rename_headers(df, rename_mappings)
    change_type_str(df, "acq_time")
    df["acq_time"] = convert_datetime(df["acq_time"])
    change_date_time(df, "acq_time")
    column_creation(df, "acquisition_timestamp")

    logging.info("Transform: Reordering headers...")
    df = df[headers]

    logging.info(f"Saving to output file... {target_file}")
    try:
        save_to_new_file(df, file_path=str(target_file))
    except Exception as e:
        logging.error(f"Error saving output file: {e}.")

    logging.info(
        f"Uploading output file to gs://{target_gcs_bucket}/{target_gcs_path}"
    )
    upload_file_to_gcs(target_file, target_gcs_bucket, target_gcs_path)

    logging.info(
        f"NASA Wildfire {pipeline_name} process completed at "
        + str(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    )


def change_type_str(df: pd.DataFrame, x: str) -> None:
    logging.info("Transform: Changing type to string...")
    df[x] = df[x].astype(str)


def change_date_time(df: pd.DataFrame, y: str) -> None:
    logging.info("Transform: Changing date time format...")
    df[y] = (
        df[y]
        .apply(lambda x: x[:2] + ":" + x[2:4] + ":" + x[4:6])
        .apply(lambda x: datetime.datetime.strptime(x, "%H:%M:%S").time())
    )


def convert_datetime(col: pd.Series) -> pd.Series:
    # Zero-pad the acquisition time to four digits and append seconds so every
    # value is a six-character HHMMSS string (e.g. "947" -> "094700").
    logging.info("Transform: Padding acquisition time...")
    return col.apply(lambda x: x.zfill(4) + "00")


def column_creation(df: pd.DataFrame, x: str) -> None:
    df[x] = df["acq_date"].astype(str) + " " + df["acq_time"].astype(str)


def rename_headers(df: pd.DataFrame, rename_mappings: dict) -> None:
    df.rename(columns=rename_mappings, inplace=True)


def save_to_new_file(df: pd.DataFrame, file_path: str) -> None:
    df.to_csv(file_path, index=False)


def download_file(source_url: str, source_file: pathlib.Path) -> None:
    logging.info("Creating 'files' folder")
    pathlib.Path("./files").mkdir(parents=True, exist_ok=True)
    logging.info(f"Downloading {source_url} into {source_file}")
    r = requests.get(source_url, stream=True)
    if r.status_code == 200:
        with open(source_file, "wb") as f:
            for chunk in r:
                f.write(chunk)
    else:
        logging.error(f"Couldn't download {source_url}: {r.text}")


def upload_file_to_gcs(file_path: pathlib.Path, gcs_bucket: str, gcs_path: str) -> None:
    storage_client = storage.Client()
    bucket = storage_client.bucket(gcs_bucket)
    blob = bucket.blob(gcs_path)
    blob.upload_from_filename(file_path)


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)

    main(
        source_url=os.environ["SOURCE_URL"],
        source_file=pathlib.Path(os.environ["SOURCE_FILE"]).expanduser(),
        target_file=pathlib.Path(os.environ["TARGET_FILE"]).expanduser(),
        target_gcs_bucket=os.environ["TARGET_GCS_BUCKET"],
        target_gcs_path=os.environ["TARGET_GCS_PATH"],
        headers=json.loads(os.environ["CSV_HEADERS"]),
        rename_mappings=json.loads(os.environ["RENAME_MAPPINGS"]),
        pipeline_name=os.environ["PIPELINE_NAME"],
    )
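
For context on the transform above: acq_time arrives in the source CSV as a string of one to four digits, is zero-padded to HHMMSS, reformatted to HH:MM:SS, and then joined with acq_date into acquisition_timestamp. A minimal sketch of those steps on invented sample values (the sample rows are hypothetical, not part of the commit):

import datetime

import pandas as pd

# Invented sample rows mimicking the source CSV's acq_date / acq_time columns.
df = pd.DataFrame({"acq_date": ["2021-08-01", "2021-08-01"], "acq_time": [947, 23]})

df["acq_time"] = df["acq_time"].astype(str)                             # change_type_str
df["acq_time"] = df["acq_time"].apply(lambda x: x.zfill(4) + "00")      # convert_datetime: "947" -> "094700"
df["acq_time"] = (
    df["acq_time"]
    .apply(lambda x: x[:2] + ":" + x[2:4] + ":" + x[4:6])               # "094700" -> "09:47:00"
    .apply(lambda x: datetime.datetime.strptime(x, "%H:%M:%S").time())  # change_date_time
)
df["acquisition_timestamp"] = df["acq_date"].astype(str) + " " + df["acq_time"].astype(str)  # column_creation
print(df["acquisition_timestamp"].tolist())  # ['2021-08-01 09:47:00', '2021-08-01 00:23:00']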
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
google-cloud-storage
pandas
requests
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

dataset:
  name: nasa_wildfire
  friendly_name: nasa_wildfire
  description: NASA Wildfire dataset
  dataset_sources: ~
  terms_of_use: ~

resources:
  - type: bigquery_dataset
    dataset_id: nasa_wildfire
    description: Past Week dataset
