Merged
Changes shown below are from 15 of the 29 commits.

Commits:
16d7b2f
docs: Add Managed Kafka Connect terraform sample
salmany Jul 23, 2025
163059e
Update tags in main.tf
salmany Jul 23, 2025
0c1763e
Merge branch 'main' into salman-mkc-code-examples
glasnt Jul 25, 2025
dec3c5a
Addressed PR comments.
salmany Jul 28, 2025
0886c4e
Add comment to clarify memory_bytes value.
salmany Jul 28, 2025
315b503
Fix incorrect Kafka cluster reference.
salmany Jul 28, 2025
be446e6
docs: Add Managed Kafka Connect terraform sample
salmany Jul 29, 2025
ad846d7
Fix typo in memory_bytes for connectors
salmany Jul 29, 2025
9599c11
Fixed formatting.
salmany Jul 29, 2025
7188e27
Fix formatting
salmany Jul 29, 2025
4fdbd4c
Merge branch 'main' into mkc-connector-examples
salmany Jul 29, 2025
30dd507
Fix snippet tags.
salmany Jul 29, 2025
274f08f
Fix tags
salmany Jul 29, 2025
8a162d8
Adding workaround for subnet resource deletion.
salmany Jul 30, 2025
5a99b76
Fixed errors and formatting.
salmany Jul 30, 2025
79d5fc0
Merge branch 'main' into mkc-connector-examples
salmany Aug 7, 2025
34292bd
Disable tests and modify subnet resources.
salmany Aug 7, 2025
bf50178
Updated connector sample configs.
salmany Aug 7, 2025
4217bbe
Fix formatting.
salmany Aug 7, 2025
7887a02
Add comment for MM connector target cluster.
salmany Aug 7, 2025
2094920
Change MM2 example to include 2 GMK clusters.
salmany Aug 11, 2025
9b249fe
Fix connector name to match id.
salmany Aug 11, 2025
7970c79
Fix connector bootstrap server addresses.
salmany Aug 11, 2025
080f2b0
Fix bootstrap server addresses and add comment.
salmany Aug 11, 2025
4571dd6
Fix whitespaces and formatting.
salmany Aug 11, 2025
91d48c7
Add newline.
salmany Aug 11, 2025
ea14a5a
Add comments for access to MM2 clusters and other fixes.
salmany Aug 13, 2025
4aa8823
Formatting fix.
salmany Aug 13, 2025
f28c331
Applying suggestions.
salmany Aug 13, 2025
59 changes: 59 additions & 0 deletions managedkafka/managedkafka_create_connect_cluster/main.tf
@@ -0,0 +1,59 @@
/**
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

# [START managedkafka_create_connect_cluster_parent]
resource "google_managed_kafka_cluster" "default" {
project = data.google_project.default.project_id
cluster_id = "my-cluster-id"
location = "us-central1"
capacity_config {
vcpu_count = 3
memory_bytes = 3221225472 # 3 GiB
}
gcp_config {
access_config {
network_configs {
subnet = "projects/${data.google_project.default.number}/regions/us-central1/subnetworks/default"
}
}
}
}

# [START managedkafka_create_connect_cluster]
resource "google_managed_kafka_connect_cluster" "default" {
provider = google-beta
project = data.google_project.default.project_id
connect_cluster_id = "my-connect-cluster-id"
location = "us-central1"
kafka_cluster = google_managed_kafka_cluster.default.id
capacity_config {
vcpu_count = 12
memory_bytes = 12884901888 # 12 GiB
}
gcp_config {
access_config {
network_configs {
primary_subnet = "projects/${data.google_project.default.number}/regions/us-central1/subnetworks/default"
}
}
}
}
# [END managedkafka_create_connect_cluster]

data "google_project" "default" {
  provider = google-beta
}
# [END managedkafka_create_connect_cluster_parent]
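
Note: these samples assume that provider configuration (project, region, and the google / google-beta providers) is supplied by the surrounding test harness. To apply one standalone, a minimal provider setup along the following lines is needed; the version constraints and the my-project-id placeholder are illustrative assumptions, not part of the sample.

terraform {
  required_providers {
    google = {
      source  = "hashicorp/google"
      version = ">= 6.0.0" # illustrative constraint
    }
    google-beta = {
      source  = "hashicorp/google-beta"
      version = ">= 6.0.0" # illustrative constraint
    }
  }
}

provider "google" {
  project = "my-project-id" # placeholder: your project ID
  region  = "us-central1"
}

provider "google-beta" {
  project = "my-project-id" # placeholder: your project ID
  region  = "us-central1"
}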
115 changes: 115 additions & 0 deletions managedkafka/managedkafka_create_connector_bigquery_sink/main.tf
@@ -0,0 +1,115 @@
/**
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

# [START managedkafka_create_connector_bigquery_sink_parent]

resource "google_managed_kafka_cluster" "default" {
project = data.google_project.default.project_id
cluster_id = "my-cluster-id"
location = "us-central1"
capacity_config {
vcpu_count = 3
memory_bytes = 3221225472 # 3 GiB
}
gcp_config {
access_config {
network_configs {
subnet = google_compute_subnetwork.default.id
}
}
}
}

resource "google_managed_kafka_connect_cluster" "default" {
provider = google-beta
project = data.google_project.default.project_id
connect_cluster_id = "my-connect-cluster-id"
location = "us-central1"
kafka_cluster = google_managed_kafka_cluster.default.id
capacity_config {
vcpu_count = 12
memory_bytes = 12884901888 # 12 GiB
}
gcp_config {
access_config {
network_configs {
primary_subnet = google_compute_subnetwork.default.id
}
}
}
}

# [START managedkafka_subnetwork]
resource "google_compute_subnetwork" "default" {
name = "test-subnetwork"
ip_cidr_range = "10.2.0.0/16"
region = "us-central1"
network = google_compute_network.default.id

provisioner "local-exec" {
when = destroy
command = <<-EOT
set -e
gcloud compute network-attachments list \
--filter="subnetworks:https://www.googleapis.com/compute/v1/${self.id}" \
--format="value(name)" --project="${self.project}" |
while read -r na_name; do
[[ -z "$na_name" ]] && continue
for i in {1..5}; do
gcloud compute network-attachments delete "$na_name" \
--project="${self.project}" --region="${self.region}" --quiet && break
if [[ $i -eq 5 ]]; then exit 1; fi
sleep 30
done
done
EOT
interpreter = ["bash", "-c"]
}
}

resource "google_compute_network" "default" {
name = "test-network"
auto_create_subnetworks = false
}
# [END managedkafka_subnetwork]

# [START managedkafka_create_connector_bigquery_sink]
resource "google_managed_kafka_connector" "example-bigquery-sink-connector" {
project = data.google_project.default.project_id
connector_id = "my-bigquery-sink-connector"
connect_cluster = google_managed_kafka_connect_cluster.default.connect_cluster_id
location = "us-central1"

configs = {
"name" = "my-bigquery-sink-connector"
"project" = "GCP_PROJECT_ID"
Comment on lines +82 to +83 (Contributor):

These look like repeated values from the main resource args. If that's the case, you can use the dynamic project ID rather than hardcoding it here.

Suggested change:
-    "name"    = "my-bigquery-sink-connector"
-    "project" = "GCP_PROJECT_ID"
+    "name"    = "my-bigquery-sink-connector"
+    "project" = data.google_project.default.project_id

"topics" = "GMK_TOPIC_ID"
"tasks.max" = "3"
"connector.class" = "com.wepay.kafka.connect.bigquery.BigQuerySinkConnector"
"key.converter" = "org.apache.kafka.connect.storage.StringConverter"
"value.converter" = "org.apache.kafka.connect.json.JsonConverter"
"value.converter.schemas.enable" = "false"
"defaultDataset" = "BQ_DATASET_ID"
}

provider = google-beta
}
# [END managedkafka_create_connector_bigquery_sink]

data "google_project" "default" {
}

# [END managedkafka_create_connector_bigquery_sink_parent]
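
Note: the connector configs above use placeholders (GCP_PROJECT_ID, GMK_TOPIC_ID, BQ_DATASET_ID) for resources that must already exist. As a sketch only, assuming you also want Terraform to manage the target dataset (the resource name below is hypothetical), it could look like:

resource "google_bigquery_dataset" "sink" {
  project    = data.google_project.default.project_id
  dataset_id = "BQ_DATASET_ID" # hypothetical; must match the connector's "defaultDataset" value
  location   = "us-central1"
}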
@@ -0,0 +1,115 @@
/**
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

# [START managedkafka_create_connector_cloud_storage_sink_parent]

resource "google_managed_kafka_cluster" "default" {
project = data.google_project.default.project_id
cluster_id = "my-cluster-id"
location = "us-central1"
capacity_config {
vcpu_count = 3
memory_bytes = 3221225472 # 3 GiB
}
gcp_config {
access_config {
network_configs {
subnet = google_compute_subnetwork.default.id
}
}
}
}

resource "google_managed_kafka_connect_cluster" "default" {
provider = google-beta
project = data.google_project.default.project_id
connect_cluster_id = "my-connect-cluster-id"
location = "us-central1"
kafka_cluster = google_managed_kafka_cluster.default.id
capacity_config {
vcpu_count = 12
memory_bytes = 12884901888 # 12 GiB
}
gcp_config {
access_config {
network_configs {
primary_subnet = google_compute_subnetwork.default.id
}
}
}
}

# [START managedkafka_subnetwork]
resource "google_compute_subnetwork" "default" {
name = "test-subnetwork"
ip_cidr_range = "10.2.0.0/16"
region = "us-central1"
network = google_compute_network.default.id

provisioner "local-exec" {
when = destroy
command = <<-EOT
set -e
gcloud compute network-attachments list \
--filter="subnetworks:https://www.googleapis.com/compute/v1/${self.id}" \
--format="value(name)" --project="${self.project}" |
while read -r na_name; do
[[ -z "$na_name" ]] && continue
for i in {1..5}; do
gcloud compute network-attachments delete "$na_name" \
--project="${self.project}" --region="${self.region}" --quiet && break
if [[ $i -eq 5 ]]; then exit 1; fi
sleep 30
done
done
EOT
interpreter = ["bash", "-c"]
}
}

resource "google_compute_network" "default" {
name = "test-network"
auto_create_subnetworks = false
}
# [END managedkafka_subnetwork]

# [START managedkafka_create_connector_cloud_storage_sink]
resource "google_managed_kafka_connector" "example-cloud-storage-sink-connector" {
project = data.google_project.default.project_id
connector_id = "my-gcs-sink-connector"
connect_cluster = google_managed_kafka_connect_cluster.default.connect_cluster_id
location = "us-central1"

configs = {
"connector.class" = "io.aiven.kafka.connect.gcs.GcsSinkConnector"
"tasks.max" = "1"
"topics" = "GMK_TOPIC_ID"
"gcs.bucket.name" = "GCS_BUCKET_NAME"
"gcs.credentials.default" = "true"
"format.output.type" = "json"
"name" = "my-gcs-sink-connector"
"value.converter" = "org.apache.kafka.connect.json.JsonConverter"
"value.converter.schemas.enable" = "false"
"key.converter" = "org.apache.kafka.connect.storage.StringConverter"
}
provider = google-beta
}
# [END managedkafka_create_connector_cloud_storage_sink]

data "google_project" "default" {
}

# [END managedkafka_create_connector_cloud_storage_sink_parent]
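
Note: as with the BigQuery sample, GMK_TOPIC_ID and GCS_BUCKET_NAME are placeholders for resources that must already exist. A minimal sketch for managing the destination bucket in the same configuration (the resource name is hypothetical, and real bucket names must be globally unique and lowercase):

resource "google_storage_bucket" "sink" {
  project                     = data.google_project.default.project_id
  name                        = "GCS_BUCKET_NAME" # placeholder; must be globally unique and lowercase
  location                    = "US-CENTRAL1"
  uniform_bucket_level_access = true
}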