Skip to content

Commit d0f2fcb

Browse files
committed
Terraform corrections 2
1 parent 276858a commit d0f2fcb

File tree

5 files changed

+17
-15
lines changed

5 files changed

+17
-15
lines changed

functions-python/batch_process_dataset/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ The function expects a Pub/Sub message with the following format:
3232

3333
# Function configuration
3434
The function is configured using the following environment variables:
35-
- `DATASETS_BUCKET_NANE`: The name of the bucket where the datasets are stored.
35+
- `DATASETS_BUCKET_NAME`: The name of the bucket where the datasets are stored.
3636
- `FEEDS_DATABASE_URL`: The URL of the feeds database.
3737
- `MAXIMUM_EXECUTIONS`: [Optional] The maximum number of executions per dataset. This controls the number of times a dataset can be processed per execution id. By default, it is 1.
3838

functions-python/batch_process_dataset/src/main.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -308,8 +308,7 @@ def process_dataset(cloud_event: CloudEvent):
308308
Logger.init_logger()
309309
logging.info("Function Started")
310310
stable_id = "UNKNOWN"
311-
execution_id = "UNKNOWN"
312-
bucket_name = os.getenv("DATASETS_BUCKET_NANE")
311+
bucket_name = os.getenv("DATASETS_BUCKET_NAME")
313312

314313
try:
315314
# Extract data from message

functions-python/export_csv/src/main.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -324,6 +324,7 @@ def upload_file_to_storage(source_file_path, target_path):
324324
Uploads a file to the GCP bucket
325325
"""
326326
bucket_name = os.getenv("DATASETS_BUCKET_NAME")
327+
print(f"Uploading file to bucket {bucket_name} at path {target_path}")
327328
bucket = storage.Client().get_bucket(bucket_name)
328329
blob = bucket.blob(target_path)
329330
with open(source_file_path, "rb") as file:

infra/batch/main.tf

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -249,7 +249,7 @@ resource "google_cloudfunctions2_function" "pubsub_function" {
249249
vpc_connector_egress_settings = "PRIVATE_RANGES_ONLY"
250250

251251
environment_variables = {
252-
DATASETS_BUCKET_NANE = google_storage_bucket.datasets_bucket.name
252+
DATASETS_BUCKET_NAME = google_storage_bucket.datasets_bucket.name
253253
# prevents multiline logs from being truncated on GCP console
254254
PYTHONNODEBUGRANGES = 0
255255
DB_REUSE_SESSION = "True"

infra/functions-python/main.tf

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,18 @@ resource "google_storage_bucket" "gbfs_snapshots_bucket" {
108108
name = "${var.gbfs_bucket_name}-${var.environment}"
109109
}
110110

111+
resource "google_storage_bucket_iam_member" "datasets_bucket_functions_service_account" {
112+
bucket = google_storage_bucket.datasets_bucket.name
113+
role = "roles/storage.admin"
114+
member = "serviceAccount:${google_service_account.functions_service_account.email}"
115+
}
116+
117+
resource "google_project_iam_member" "datasets_bucket_functions_service_account" {
118+
project = var.project_id
119+
member = "serviceAccount:${google_service_account.functions_service_account.email}"
120+
role = "roles/storage.admin"
121+
}
122+
111123
# Cloud function source code zip files:
112124
# 1. Tokens
113125
resource "google_storage_bucket_object" "function_token_zip" {
@@ -856,8 +868,7 @@ resource "google_cloudfunctions2_function" "export_csv" {
856868
}
857869
service_config {
858870
environment_variables = {
859-
DATASETS_BUCKET_NANE = var.datasets_bucket_name
860-
QAZ = "${var.datasets_bucket_name}-${var.environment}"
871+
DATASETS_BUCKET_NAME = "${var.datasets_bucket_name}-${var.environment}"
861872
PROJECT_ID = var.project_id
862873
ENVIRONMENT = var.environment
863874
}
@@ -933,15 +944,6 @@ resource "google_project_iam_member" "event-receiving" {
933944
depends_on = [google_project_iam_member.invoking]
934945
}
935946

936-
# Grant read access to the datasets bucket for the service account
937-
resource "google_storage_bucket_iam_binding" "bucket_object_viewer" {
938-
bucket = "${var.datasets_bucket_name}-${var.environment}"
939-
role = "roles/storage.objectViewer"
940-
members = [
941-
"serviceAccount:${google_service_account.functions_service_account.email}"
942-
]
943-
}
944-
945947
# Grant write access to the gbfs bucket for the service account
946948
resource "google_storage_bucket_iam_binding" "gbfs_bucket_object_creator" {
947949
bucket = google_storage_bucket.gbfs_snapshots_bucket.name

0 commit comments

Comments
 (0)