diff --git a/.github/workflows/api-deployer.yml b/.github/workflows/api-deployer.yml
index f533b8605..2d631bece 100644
--- a/.github/workflows/api-deployer.yml
+++ b/.github/workflows/api-deployer.yml
@@ -12,6 +12,9 @@ on:
     OAUTH2_CLIENT_SECRET:
       description: OAuth client secret part of the Identity Aware Proxy configuration
       required: true
+    OP_SERVICE_ACCOUNT_TOKEN:
+      description: 1Password service account token
+      required: true
   inputs:
     ENVIRONMENT:
       description: API environment. Possible values prod, staging and dev
@@ -280,10 +283,18 @@ jobs:
           echo "GLOBAL_RATE_LIMIT_REQ_PER_MINUTE=${{ inputs.GLOBAL_RATE_LIMIT_REQ_PER_MINUTE }}" >> $GITHUB_ENV
           echo "VALIDATOR_ENDPOINT=${{ inputs.VALIDATOR_ENDPOINT }}" >> $GITHUB_ENV
 
+      - name: Load secret from 1Password
+        uses: 1password/load-secrets-action@v2
+        with:
+          export-env: true
+        env:
+          OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
+          TRANSITLAND_API_KEY: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/TansitLand API Key/credential"
+
       - name: Populate Variables
         run: |
           scripts/replace-variables.sh -in_file infra/backend.conf.rename_me -out_file infra/backend.conf -variables BUCKET_NAME,OBJECT_PREFIX
-          scripts/replace-variables.sh -in_file infra/vars.tfvars.rename_me -out_file infra/vars.tfvars -variables PROJECT_ID,REGION,ENVIRONMENT,DEPLOYER_SERVICE_ACCOUNT,FEED_API_IMAGE_VERSION,OAUTH2_CLIENT_ID,OAUTH2_CLIENT_SECRET,GLOBAL_RATE_LIMIT_REQ_PER_MINUTE,ARTIFACT_REPO_NAME,VALIDATOR_ENDPOINT
+          scripts/replace-variables.sh -in_file infra/vars.tfvars.rename_me -out_file infra/vars.tfvars -variables PROJECT_ID,REGION,ENVIRONMENT,DEPLOYER_SERVICE_ACCOUNT,FEED_API_IMAGE_VERSION,OAUTH2_CLIENT_ID,OAUTH2_CLIENT_SECRET,GLOBAL_RATE_LIMIT_REQ_PER_MINUTE,ARTIFACT_REPO_NAME,VALIDATOR_ENDPOINT,TRANSITLAND_API_KEY
 
       - uses: hashicorp/setup-terraform@v3
         with:
diff --git a/.github/workflows/api-dev.yml b/.github/workflows/api-dev.yml
index ed0fedce5..f3738b9ec 100644
--- a/.github/workflows/api-dev.yml
+++ b/.github/workflows/api-dev.yml
@@ -26,6 +26,7 @@ jobs:
       GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.DEV_GCP_MOBILITY_FEEDS_SA_KEY }}
       OAUTH2_CLIENT_ID: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
       OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
+      OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
 
   integration-tests:
     if: ${{ github.event.inputs.run_integration_tests == 'true' }}
diff --git a/.github/workflows/api-prod.yml b/.github/workflows/api-prod.yml
index 928f5f471..3938583e9 100644
--- a/.github/workflows/api-prod.yml
+++ b/.github/workflows/api-prod.yml
@@ -22,3 +22,4 @@ jobs:
       GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
       OAUTH2_CLIENT_ID: ${{ secrets.PROD_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
       OAUTH2_CLIENT_SECRET: ${{ secrets.PROD_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
+      OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/api-qa.yml b/.github/workflows/api-qa.yml
index d7dc05551..2f527f4ec 100644
--- a/.github/workflows/api-qa.yml
+++ b/.github/workflows/api-qa.yml
@@ -22,4 +22,4 @@ jobs:
       GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
       OAUTH2_CLIENT_ID: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_ID}}
       OAUTH2_CLIENT_SECRET: ${{ secrets.DEV_MOBILITY_FEEDS_OAUTH2_CLIENT_SECRET}}
-
+      OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
\ No newline at end of file
diff --git a/functions-python/README.md b/functions-python/README.md
index c94f41a71..a95eb27ff 100644
--- a/functions-python/README.md
+++ b/functions-python/README.md
@@ -31,6 +31,7 @@ The function configuration file contains the following properties:
 - `max_instance_count`: The maximum number of function instances that can be created in response to a load.
 - `min_instance_count`: The minimum number of function instances that can be created in response to a load.
 - `available_cpu_count`: The number of CPU cores that are available to the function.
+- `available_memory`: The amount of memory available to the function.
 
 # Local Setup
 
diff --git a/functions-python/feed_sync_dispatcher_transitland/function_config.json b/functions-python/feed_sync_dispatcher_transitland/function_config.json
index 99554a359..fcf05749f 100644
--- a/functions-python/feed_sync_dispatcher_transitland/function_config.json
+++ b/functions-python/feed_sync_dispatcher_transitland/function_config.json
@@ -2,7 +2,7 @@
   "name": "feed-sync-dispatcher-transitland",
   "description": "Feed Sync Dispatcher for Transitland",
   "entry_point": "feed_sync_dispatcher_transitland",
-  "timeout": 540,
+  "timeout": 3600,
   "memory": "512Mi",
   "trigger_http": true,
   "include_folders": ["database_gen", "helpers"],
@@ -11,9 +11,10 @@
       "key": "FEEDS_DATABASE_URL"
     }
   ],
-  "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
-  "max_instance_request_concurrency": 20,
-  "max_instance_count": 10,
+  "ingress_settings": "ALLOW_ALL",
+  "max_instance_request_concurrency": 1,
+  "max_instance_count": 1,
   "min_instance_count": 0,
-  "available_cpu": 1
+  "available_cpu": 1,
+  "available_memory": "512Mi"
 }
diff --git a/functions-python/feed_sync_dispatcher_transitland/src/main.py b/functions-python/feed_sync_dispatcher_transitland/src/main.py
index 90592f725..9f718182c 100644
--- a/functions-python/feed_sync_dispatcher_transitland/src/main.py
+++ b/functions-python/feed_sync_dispatcher_transitland/src/main.py
@@ -15,35 +15,34 @@
 #
 import json
-import os
 import logging
-import time
+import os
 import random
+import time
 from dataclasses import dataclass, asdict
 from typing import Optional, List
 
-import requests
-from requests.exceptions import RequestException, HTTPError
-import pandas as pd
 import functions_framework
+import pandas as pd
+import requests
 from google.cloud.pubsub_v1.futures import Future
+from requests.exceptions import RequestException, HTTPError
 from sqlalchemy.orm import Session
-from sqlalchemy import text
 
+from database_gen.sqlacodegen_models import Gtfsfeed
 from helpers.feed_sync.feed_sync_common import FeedSyncProcessor, FeedSyncPayload
 from helpers.feed_sync.feed_sync_dispatcher import feed_sync_dispatcher
+from helpers.logger import Logger
 from helpers.pub_sub import get_pubsub_client, get_execution_id
 
 # Logging configuration
-logging.basicConfig(
-    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
-)
+logging.basicConfig(level=logging.INFO)
 
 # Environment variables
 PUBSUB_TOPIC_NAME = os.getenv("PUBSUB_TOPIC_NAME")
 PROJECT_ID = os.getenv("PROJECT_ID")
 FEEDS_DATABASE_URL = os.getenv("FEEDS_DATABASE_URL")
-apikey = os.getenv("TRANSITLAND_API_KEY")
+TRANSITLAND_API_KEY = os.getenv("TRANSITLAND_API_KEY")
 TRANSITLAND_OPERATOR_URL = os.getenv("TRANSITLAND_OPERATOR_URL")
 TRANSITLAND_FEED_URL = os.getenv("TRANSITLAND_FEED_URL")
 spec = ["gtfs", "gtfs-rt"]
 
@@ -83,11 +82,16 @@ def to_json(self):
 class TransitFeedSyncProcessor(FeedSyncProcessor):
     def check_url_status(self, url: str) -> bool:
         """
-        Checks if a URL returns a valid response (not 404 or 500).
+        Checks if a URL returns a valid response status code.
""" try: + logging.info(f"Checking URL: {url}") + if url is None or len(url) == 0: + logging.warning("URL is empty. Skipping check.") + return False response = requests.head(url, timeout=25) - return response.status_code not in {404, 500} + logging.info(f"URL status code: {response.status_code}") + return response.status_code < 400 except requests.RequestException as e: logging.warning(f"Failed to reach {url}: {e}") return False @@ -99,9 +103,17 @@ def process_sync( Process data synchronously to fetch, extract, combine, filter and prepare payloads for publishing to a queue based on conditions related to the data retrieved from TransitLand API. """ - feeds_data = self.get_data(TRANSITLAND_FEED_URL, apikey, spec, session) + feeds_data = self.get_data( + TRANSITLAND_FEED_URL, TRANSITLAND_API_KEY, spec, session + ) + logging.info("Fetched %s feeds from TransitLand API", len(feeds_data["feeds"])) + operators_data = self.get_data( - TRANSITLAND_OPERATOR_URL, apikey, session=session + TRANSITLAND_OPERATOR_URL, TRANSITLAND_API_KEY, session=session + ) + logging.info( + "Fetched %s operators from TransitLand API", + len(operators_data["operators"]), ) feeds = self.extract_feeds_data(feeds_data) @@ -151,12 +163,25 @@ def process_sync( .str.lower() .isin([c.lower() for c in countries_not_included]) ] + logging.info( + "Filtered out %s feeds from countries: %s", + len(df_grouped) - len(filtered_df), + countries_not_included, + ) # Filtered out URLs that return undesired status codes + filtered_df = filtered_df.drop_duplicates( + subset=["feed_url"] + ) # Drop duplicates filtered_df = filtered_df[filtered_df["feed_url"].apply(self.check_url_status)] + logging.info( + "Filtered out %s feeds with invalid URLs", + len(df_grouped) - len(filtered_df), + ) # Convert filtered DataFrame to dictionary format combined_data = filtered_df.to_dict(orient="records") + logging.info("Prepared %s feeds for publishing", len(combined_data)) payloads = [] for data in combined_data: @@ -197,7 +222,7 @@ def process_sync( def get_data( self, url, - apikey, + api_key, spec=None, session=None, max_retries=3, @@ -209,11 +234,13 @@ def get_data( Handles rate limits, retries, and error cases. Returns the parsed data as a dictionary containing feeds and operators. """ - headers = {"apikey": apikey} + headers = {"apikey": api_key} params = {"spec": spec} if spec else {} all_data = {"feeds": [], "operators": []} delay = initial_delay + response = None + logging.info("Fetching data from %s", url) while url: for attempt in range(max_retries): try: @@ -225,12 +252,17 @@ def get_data( all_data["feeds"].extend(data.get("feeds", [])) all_data["operators"].extend(data.get("operators", [])) url = data.get("meta", {}).get("next") + logging.info( + "Fetched %s feeds and %s operators", + len(all_data["feeds"]), + len(all_data["operators"]), + ) + logging.info("Next URL: %s", url) delay = initial_delay break - except (RequestException, HTTPError) as e: logging.error("Attempt %s failed: %s", attempt + 1, e) - if response.status_code == 429: + if response is not None and response.status_code == 429: logging.warning("Rate limit hit. 
Waiting for %s seconds", delay) time.sleep(delay + random.uniform(0, 1)) delay = min(delay * 2, max_delay) @@ -240,7 +272,9 @@ def get_data( ) return all_data else: + logging.info("Retrying in %s seconds", delay) time.sleep(delay) + logging.info("Finished fetching data.") return all_data def extract_feeds_data(self, feeds_data: dict) -> List[dict]: @@ -297,13 +331,12 @@ def check_external_id( :param source: The source to filter by (e.g., 'TLD' for TransitLand) :return: True if the feed exists, False otherwise """ - query = text( - "SELECT 1 FROM public.externalid WHERE associated_id = :external_id AND source = :source LIMIT 1" + results = ( + db_session.query(Gtfsfeed) + .filter(Gtfsfeed.externalids.any(associated_id=external_id)) + .all() ) - result = db_session.execute( - query, {"external_id": external_id, "source": source} - ).fetchone() - return result is not None + return results is not None and len(results) > 0 def get_mbd_feed_url( self, db_session: Session, external_id: str, source: str @@ -315,19 +348,12 @@ def get_mbd_feed_url( :param source: The source to filter by (e.g., 'TLD' for TransitLand) :return: feed_url in mbd if exists, otherwise None """ - query = text( - """ - SELECT f.producer_url - FROM public.feed f - JOIN public.externalid e ON f.id = e.feed_id - WHERE e.associated_id = :external_id AND e.source = :source - LIMIT 1 - """ + results = ( + db_session.query(Gtfsfeed) + .filter(Gtfsfeed.externalids.any(associated_id=external_id)) + .all() ) - result = db_session.execute( - query, {"external_id": external_id, "source": source} - ).fetchone() - return result[0] if result else None + return results[0].producer_url if results else None def publish_callback( self, future: Future, payload: FeedSyncPayload, topic_path: str @@ -350,6 +376,7 @@ def feed_sync_dispatcher_transitland(request): """ HTTP Function entry point queries the transitland API and publishes events to a Pub/Sub topic to be processed. 
""" + Logger.init_logger() publisher = get_pubsub_client() topic_path = publisher.topic_path(PROJECT_ID, PUBSUB_TOPIC_NAME) transit_land_feed_sync_processor = TransitFeedSyncProcessor() diff --git a/functions-python/feed_sync_dispatcher_transitland/tests/test_feed_sync.py b/functions-python/feed_sync_dispatcher_transitland/tests/test_feed_sync.py index 470ce5115..04ec418aa 100644 --- a/functions-python/feed_sync_dispatcher_transitland/tests/test_feed_sync.py +++ b/functions-python/feed_sync_dispatcher_transitland/tests/test_feed_sync.py @@ -2,6 +2,8 @@ from unittest.mock import Mock, patch, call from requests import Session as RequestsSession from sqlalchemy.orm import Session as DBSession + +from database_gen.sqlacodegen_models import Gtfsfeed from feed_sync_dispatcher_transitland.src.main import ( TransitFeedSyncProcessor, FeedSyncPayload, @@ -90,24 +92,24 @@ def test_extract_operators_data(processor): def test_check_external_id(processor): mock_db_session = Mock(spec=DBSession) - mock_db_session.execute.return_value.fetchone.return_value = (1,) + mock_db_session.query.return_value.filter.return_value.all.return_value = (1,) result = processor.check_external_id(mock_db_session, "onestop1", "TLD") assert result is True - mock_db_session.execute.return_value.fetchone.return_value = None + mock_db_session.query.return_value.filter.return_value.all.return_value = None result = processor.check_external_id(mock_db_session, "onestop2", "TLD") assert result is False def test_get_mbd_feed_url(processor): mock_db_session = Mock(spec=DBSession) - mock_db_session.execute.return_value.fetchone.return_value = ( - "http://example.com/feed1", - ) + mock_db_session.query.return_value.filter.return_value.all.return_value = [ + Gtfsfeed(producer_url="http://example.com/feed1") + ] result = processor.get_mbd_feed_url(mock_db_session, "onestop1", "TLD") assert result == "http://example.com/feed1" - mock_db_session.execute.return_value.fetchone.return_value = None + mock_db_session.query.return_value.filter.return_value.all.return_value = None result = processor.get_mbd_feed_url(mock_db_session, "onestop2", "TLD") assert result is None @@ -343,7 +345,7 @@ def test_get_data_retries(processor): with patch("time.sleep", return_value=None) as mock_sleep: result = processor.get_data( url="http://example.com", - apikey="dummy_api_key", + api_key="dummy_api_key", session=mock_session, max_retries=3, initial_delay=1, diff --git a/functions-python/helpers/feed_sync/feed_sync_dispatcher.py b/functions-python/helpers/feed_sync/feed_sync_dispatcher.py index bb296968b..594fac806 100644 --- a/functions-python/helpers/feed_sync/feed_sync_dispatcher.py +++ b/functions-python/helpers/feed_sync/feed_sync_dispatcher.py @@ -35,7 +35,7 @@ def feed_sync_dispatcher( """ publisher = get_pubsub_client() try: - session = start_db_session(os.getenv("FEEDS_DATABASE_URL")) + session = start_db_session(os.getenv("FEEDS_DATABASE_URL"), echo=False) payloads = feed_sync_processor.process_sync(session, execution_id) except Exception as error: logging.error(f"Error processing feeds sync: {error}") @@ -47,7 +47,7 @@ def feed_sync_dispatcher( for payload in payloads: data_str = json.dumps(payload.payload.__dict__) - print(f"Publishing {data_str} to {pubsub_topic_path}.") + logging.info(f"Publishing {data_str} to {pubsub_topic_path}.") future = publish(publisher, pubsub_topic_path, data_str.encode("utf-8")) future.add_done_callback( lambda _: feed_sync_processor.publish_callback( diff --git a/infra/functions-python/main.tf 
b/infra/functions-python/main.tf index ddf768c82..25370b9a1 100644 --- a/infra/functions-python/main.tf +++ b/infra/functions-python/main.tf @@ -33,6 +33,9 @@ locals { function_gbfs_validation_report_config = jsondecode(file("${path.module}/../../functions-python/gbfs_validator/function_config.json")) function_gbfs_validation_report_zip = "${path.module}/../../functions-python/gbfs_validator/.dist/gbfs_validator.zip" + + function_feed_sync_dispatcher_transitland_config = jsondecode(file("${path.module}/../../functions-python/feed_sync_dispatcher_transitland/function_config.json")) + function_feed_sync_dispatcher_transitland_zip = "${path.module}/../../functions-python/feed_sync_dispatcher_transitland/.dist/feed_sync_dispatcher_transitland.zip" } locals { @@ -106,6 +109,13 @@ resource "google_storage_bucket_object" "gbfs_validation_report_zip" { source = local.function_gbfs_validation_report_zip } +# 6. Feed sync dispatcher transitland +resource "google_storage_bucket_object" "feed_sync_dispatcher_transitland_zip" { + bucket = google_storage_bucket.functions_bucket.name + name = "feed-sync-dispatcher-transitland-${substr(filebase64sha256(local.function_feed_sync_dispatcher_transitland_zip), 0, 10)}.zip" + source = local.function_feed_sync_dispatcher_transitland_zip +} + # Secrets access resource "google_secret_manager_secret_iam_member" "secret_iam_member" { for_each = local.unique_secret_keys @@ -377,7 +387,7 @@ resource "google_cloudfunctions2_function" "update_validation_report" { ENV = var.environment MAX_RETRY = 10 BATCH_SIZE = 5 - WEB_VALIDATOR_URL = var.web_validator_url + WEB_VALIDATOR_URL = var.validator_endpoint # prevents multiline logs from being truncated on GCP console PYTHONNODEBUGRANGES = 0 } @@ -520,6 +530,59 @@ resource "google_cloudfunctions2_function" "gbfs_validator_pubsub" { } } +# 6. 
functions/feed_sync_dispatcher_transitland cloud function +# 6.1 Create Pub/Sub topic +resource "google_pubsub_topic" "transitland_feeds_dispatch" { + name = "transitland-feeds-dispatch" +} +resource "google_cloudfunctions2_function" "feed_sync_dispatcher_transitland" { + name = "${local.function_feed_sync_dispatcher_transitland_config.name}-batch" + description = local.function_feed_sync_dispatcher_transitland_config.description + location = var.gcp_region + depends_on = [google_project_iam_member.event-receiving, google_secret_manager_secret_iam_member.secret_iam_member] + + build_config { + runtime = var.python_runtime + entry_point = local.function_feed_sync_dispatcher_transitland_config.entry_point + source { + storage_source { + bucket = google_storage_bucket.functions_bucket.name + object = google_storage_bucket_object.feed_sync_dispatcher_transitland_zip.name + } + } + } + service_config { + environment_variables = { + PROJECT_ID = var.project_id + PYTHONNODEBUGRANGES = 0 + PUBSUB_TOPIC_NAME = google_pubsub_topic.transitland_feeds_dispatch.name + TRANSITLAND_API_KEY=var.transitland_api_key + TRANSITLAND_OPERATOR_URL="https://transit.land/api/v2/rest/operators" + TRANSITLAND_FEED_URL="https://transit.land/api/v2/rest/feeds" + } + available_memory = local.function_feed_sync_dispatcher_transitland_config.available_memory + timeout_seconds = local.function_feed_sync_dispatcher_transitland_config.timeout + available_cpu = local.function_feed_sync_dispatcher_transitland_config.available_cpu + max_instance_request_concurrency = local.function_feed_sync_dispatcher_transitland_config.max_instance_request_concurrency + max_instance_count = local.function_feed_sync_dispatcher_transitland_config.max_instance_count + min_instance_count = local.function_feed_sync_dispatcher_transitland_config.min_instance_count + service_account_email = google_service_account.functions_service_account.email + ingress_settings = local.function_feed_sync_dispatcher_transitland_config.ingress_settings + vpc_connector = data.google_vpc_access_connector.vpc_connector.id + vpc_connector_egress_settings = "PRIVATE_RANGES_ONLY" + dynamic "secret_environment_variables" { + for_each = local.function_extract_location_config.secret_environment_variables + content { + key = secret_environment_variables.value["key"] + project_id = var.project_id + secret = "${upper(var.environment)}_${secret_environment_variables.value["key"]}" + version = "latest" + } + } + } +} + + # IAM entry for all users to invoke the function resource "google_cloudfunctions2_function_iam_member" "tokens_invoker" { project = var.project_id @@ -631,6 +694,7 @@ resource "google_pubsub_topic_iam_member" "functions_publisher" { for_each = { dataset_updates = google_pubsub_topic.dataset_updates.name validate_gbfs_feed = google_pubsub_topic.validate_gbfs_feed.name + feed_sync_dispatcher_transitland = google_pubsub_topic.transitland_feeds_dispatch.name } project = var.project_id @@ -644,6 +708,7 @@ resource "google_pubsub_topic_iam_member" "functions_subscriber" { for_each = { dataset_updates = google_pubsub_topic.dataset_updates.name validate_gbfs_feed = google_pubsub_topic.validate_gbfs_feed.name + feed_sync_dispatcher_transitland = google_pubsub_topic.transitland_feeds_dispatch.name } project = var.project_id diff --git a/infra/functions-python/vars.tf b/infra/functions-python/vars.tf index 1b6ea4f0f..c5029bdf3 100644 --- a/infra/functions-python/vars.tf +++ b/infra/functions-python/vars.tf @@ -47,7 +47,7 @@ variable "public_hosted_datasets_dns" { default = 
"files.mobilitydatabase.org" } -variable "web_validator_url" { +variable "validator_endpoint" { type = string description = "URL of the web validator" default = "https://stg-gtfs-validator-web-mbzoxaljzq-ue.a.run.app" @@ -64,3 +64,8 @@ variable "gbfs_scheduler_schedule" { description = "Schedule for the GBFS scheduler job" default = "0 0 1 * *" # every month on the first day at 00:00 } + +variable "transitland_api_key" { + type = string + description = "Transitland API key" +} diff --git a/infra/main.tf b/infra/main.tf index 45341c4b7..d238259dc 100644 --- a/infra/main.tf +++ b/infra/main.tf @@ -99,11 +99,14 @@ module "feed-api" { source = "./feed-api" } + module "functions-python" { source = "./functions-python" project_id = var.project_id gcp_region = var.gcp_region environment = var.environment + transitland_api_key = var.transitland_api_key + validator_endpoint = var.validator_endpoint } module "workflows" { diff --git a/infra/vars.tf b/infra/vars.tf index 6140ae67d..6dc0ebee1 100644 --- a/infra/vars.tf +++ b/infra/vars.tf @@ -62,4 +62,8 @@ variable "artifact_repo_name" { variable "validator_endpoint" { type = string description = "URL of the validator endpoint" +} + +variable "transitland_api_key" { + type = string } \ No newline at end of file diff --git a/infra/vars.tfvars.rename_me b/infra/vars.tfvars.rename_me index d1aeac5ef..6dc3bd0b5 100644 --- a/infra/vars.tfvars.rename_me +++ b/infra/vars.tfvars.rename_me @@ -16,4 +16,5 @@ oauth2_client_id = {{OAUTH2_CLIENT_ID}} oauth2_client_secret = {{OAUTH2_CLIENT_SECRET}} global_rate_limit_req_per_minute = {{GLOBAL_RATE_LIMIT_REQ_PER_MINUTE}} -validator_endpoint = {{VALIDATOR_ENDPOINT}} \ No newline at end of file +validator_endpoint = {{VALIDATOR_ENDPOINT}} +transitland_api_key = {{TRANSITLAND_API_KEY}} \ No newline at end of file