From c3d3494ca7388dc0af7c1a2eb093cbac7307b290 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Sun, 17 Nov 2024 16:59:25 +0700 Subject: [PATCH 1/9] docs(self-hosted): experimental external kafka --- .../experimental/external-kafka.mdx | 113 ++++++++++++++++++ .../experimental/external-storage.mdx | 5 +- 2 files changed, 116 insertions(+), 2 deletions(-) create mode 100644 develop-docs/self-hosted/experimental/external-kafka.mdx diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx new file mode 100644 index 0000000000000..0c9bafdda8add --- /dev/null +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -0,0 +1,113 @@ +--- +title: Self Hosted External Kafka +sidebar_title: External Kafka +sidebar_order: 91 +--- + + + These are community-contributed docs. Sentry does not officially provide support for self-hosted configurations beyond the default install. + + +Kafka plays a very significant role on Sentry's infrastructure, from ingesting to processing events until they end up on ClickHouse or filesystem for permanent storage (which also depends on your event retention days). Since Kafka requires very heavy resources on the server host, and some infrastructure already have a Kafka cluster set up, it is possible to use an external Kafka cluster for Sentry. + +Sentry (the company) itself uses a Kafka cluster on production with a very tailored setup, especially for authentication. Some Kafka configuration options (such as `SASL_SSL` security protocol) might not be available for some services, but since everything is open source, you are encouraged to contribute to implement those missing things. + +If you are using authentication, make sure that the user is able to create new topics. As of now, there is no support for prefixed topic name. + + + After changing configuration files, re-run the ./install.sh script, to rebuild and restart the containers. See the configuration section for more information. + + +## Sentry + +Sentry uses the confluent-kafka library, which leverages the [default Kafka config from librdkafka](https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md). Modify your `sentry.conf.py` file like so: + +```python +# DEFAULT_KAFKA_OPTIONS variable is already defined in sentry.conf.py +# Make sure you don't have a duplicate variable declaration. +DEFAULT_KAFKA_OPTIONS = { + "bootstrap.servers": "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092", + "message.max.bytes": 50000000, + "socket.timeout.ms": 1000, + "security.protocol": "PLAINTEXT", # Valid options are PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL + # If you don't use any of these options below, you can remove them or set them to `None`. + "sasl.mechanism": "PLAIN", # Valid options are PLAIN, SCRAM-SHA-256, SCRAM-SHA-512. Other mechanism might be unavailable. + "sasl.username": "username", + "sasl.password": "password", + "ssl.ca.location": "/path/to/ca.pem", + "ssl.certificate.location": "/path/to/client.pem", + "ssl.key.location": "/path/to/client.key", +} +``` + +## Snuba + +Although Snuba also uses confluent-kafka under the hood, not every configuration option is available. Modify your `docker-compose.yml` file like so: + +```yaml +x-snuba-defaults: &snuba_defaults + # ... + environment: + # ... + DEFAULT_BROKERS: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" + KAFKA_SECURITY_PROTOCOL: "plaintext" # Valid options are PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL. SSL is not supported for rust-consumer. 
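    # The KAFKA_SSL_* and KAFKA_SASL_* settings below only need values when
    # KAFKA_SECURITY_PROTOCOL is one of SSL, SASL_PLAINTEXT or SASL_SSL;
    # with PLAINTEXT they can be left empty.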
+ KAFKA_SSL_CA_PATH: + KAFKA_SSL_CERT_PATH: + KAFKA_SSL_KEY_PATH: + KAFKA_SASL_MECHANISM: "PLAIN" # Valid options are PLAIN, SCRAM-SHA-256, SCRAM-SHA-512. + KAFKA_SASL_USERNAME: "username" + KAFKA_SASL_PASSWORD: "password" +``` + +If you encounter any failing startup, try to use `consumer` instead of `rust-consumer`. + +## Relay + +Modify your `relay/config.yml` file like so: + +```yaml +processing: + kafka_config: + - {name: "bootstrap.servers", value: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092"} + - {name: "message.max.bytes", value: 50000000} # 50MB + - {name: "security.protocol", value: "PLAINTEXT"} + - {name: "sasl.mechanism", value: "PLAIN"} + - {name: "sasl.username", value: "username"} + - {name: "sasl.password", value: "password"} + - {name: "ssl.ca.location", value: "/path/to/ca.pem"} + - {name: "ssl.certificate.location", value: "/path/to/client.pem"} + - {name: "ssl.key.location", value: "/path/to/client.key"} +``` + +## Vroom + +As of the time of writing, Vroom does not support any kind of authentication. + +Modify your `docker-compose.yml` file like so: + +```yaml +vroom: + # ... + environment: + # ... + SENTRY_KAFKA_BROKERS_PROFILING: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" + SENTRY_KAFKA_BROKERS_OCCURRENCES: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" +``` + +When [vroom#530](https://github.com/getsentry/vroom/pull/530) is merged, you can use authentication. You will need to modify your `docker-compose.yml` file like so: + +```yaml +vroom: + # ... + environment: + # ... + SENTRY_KAFKA_BROKERS_PROFILING: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" + SENTRY_KAFKA_BROKERS_OCCURRENCES: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" + SENTRY_KAFKA_SECURITY_PROTOCOL: "plaintext" # Valid options are PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL + SENTRY_KAFKA_SSL_CA_PATH: "" + SENTRY_KAFKA_SSL_CERT_PATH: "" + SENTRY_KAFKA_SSL_KEY_PATH: "" + SENTRY_KAFKA_SASL_MECHANISM: "PLAIN" # Valid options are PLAIN, SCRAM-SHA-256, SCRAM-SHA-512. + SENTRY_KAFKA_SASL_USERNAME: "username" + SENTRY_KAFKA_SASL_PASSWORD: "password" +``` diff --git a/develop-docs/self-hosted/experimental/external-storage.mdx b/develop-docs/self-hosted/experimental/external-storage.mdx index 64ba4dbe3ff1a..2b5c3e45d489e 100644 --- a/develop-docs/self-hosted/experimental/external-storage.mdx +++ b/develop-docs/self-hosted/experimental/external-storage.mdx @@ -4,11 +4,12 @@ sidebar_title: External Storage sidebar_order: 90 --- -In some cases, storing Sentry data on-disk is not really something people can do. Sometimes, it's better to offload it into some bucket storage (like AWS S3 or Google Cloud Storage). - These are community-contributed docs. Sentry does not officially provide support for self-hosted configurations beyond the default install. + +In some cases, storing Sentry data on-disk is not really something people can do. Sometimes, it's better to offload it into some bucket storage (like AWS S3 or Google Cloud Storage). + After changing configuration files, re-run the ./install.sh script, to rebuild and restart the containers. See the configuration section for more information. 
From 7c4158320946d62b8cab1d07c16cab417443fe82 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Thu, 21 Nov 2024 12:19:36 +0700 Subject: [PATCH 2/9] Missing `SENTRY_KAFKA_BROKERS_SPANS` config for vroom --- develop-docs/self-hosted/experimental/external-kafka.mdx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index 0c9bafdda8add..0ad5a1912f9df 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -92,6 +92,7 @@ vroom: # ... SENTRY_KAFKA_BROKERS_PROFILING: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" SENTRY_KAFKA_BROKERS_OCCURRENCES: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" + SENTRY_KAFKA_BROKERS_SPANS: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" ``` When [vroom#530](https://github.com/getsentry/vroom/pull/530) is merged, you can use authentication. You will need to modify your `docker-compose.yml` file like so: @@ -103,6 +104,7 @@ vroom: # ... SENTRY_KAFKA_BROKERS_PROFILING: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" SENTRY_KAFKA_BROKERS_OCCURRENCES: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" + SENTRY_KAFKA_BROKERS_SPANS: "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092" SENTRY_KAFKA_SECURITY_PROTOCOL: "plaintext" # Valid options are PLAINTEXT, SSL, SASL_PLAINTEXT, SASL_SSL SENTRY_KAFKA_SSL_CA_PATH: "" SENTRY_KAFKA_SSL_CERT_PATH: "" From cd33f16f1dd17a832ac601dd3f9a274cc825ff7f Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:32:51 +0700 Subject: [PATCH 3/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index 0ad5a1912f9df..ac2ae3d945df4 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -8,7 +8,7 @@ sidebar_order: 91 These are community-contributed docs. Sentry does not officially provide support for self-hosted configurations beyond the default install. -Kafka plays a very significant role on Sentry's infrastructure, from ingesting to processing events until they end up on ClickHouse or filesystem for permanent storage (which also depends on your event retention days). Since Kafka requires very heavy resources on the server host, and some infrastructure already have a Kafka cluster set up, it is possible to use an external Kafka cluster for Sentry. +Kafka plays a very significant role on Sentry's infrastructure, from ingesting to processing events until they end up on ClickHouse or filesystem for permanent storage. Since Kafka may require a significant amount of resources on the server it may make sense to split it from the main Sentry installation. This can be particularly appealing if you already have a managed Kafka cluster set up. Sentry (the company) itself uses a Kafka cluster on production with a very tailored setup, especially for authentication. Some Kafka configuration options (such as `SASL_SSL` security protocol) might not be available for some services, but since everything is open source, you are encouraged to contribute to implement those missing things. 
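Before re-running `./install.sh` against an external cluster, it can be worth confirming that the brokers are reachable from the Sentry host with the same settings you plan to use. Below is a minimal sketch using the `confluent-kafka` Python client (the same library Sentry uses); the broker names are the placeholder values from the examples above, and any extra security options should mirror whatever you put in `sentry.conf.py`.

```python
# Quick reachability check against the external Kafka cluster.
from confluent_kafka.admin import AdminClient

conf = {
    # Placeholder broker list from the examples above -- replace with your own.
    "bootstrap.servers": "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092",
    # Add security.protocol / sasl.* / ssl.* keys here if your cluster needs them,
    # mirroring the sentry.conf.py example.
}

admin = AdminClient(conf)
metadata = admin.list_topics(timeout=10)  # raises KafkaException if the cluster is unreachable
print(f"Connected, cluster id: {metadata.cluster_id}, brokers: {len(metadata.brokers)}")
print("Existing topics:", sorted(metadata.topics))
```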
From 5a304345bfde4731279172057b4372c70ddf1cc4 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:33:48 +0700 Subject: [PATCH 4/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index ac2ae3d945df4..6194b064044bd 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -15,7 +15,7 @@ Sentry (the company) itself uses a Kafka cluster on production with a very tailo If you are using authentication, make sure that the user is able to create new topics. As of now, there is no support for prefixed topic name. - After changing configuration files, re-run the ./install.sh script, to rebuild and restart the containers. See the configuration section for more information. + After changing the configuration files, re-run the ./install.sh script to rebuild and restart the containers. See the configuration section for more information. ## Sentry From ad3d9afc6dde4aa247d5964f824560208a42a58d Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:34:22 +0700 Subject: [PATCH 5/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index 6194b064044bd..a03304ef1a769 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -55,8 +55,8 @@ x-snuba-defaults: &snuba_defaults KAFKA_SSL_CERT_PATH: KAFKA_SSL_KEY_PATH: KAFKA_SASL_MECHANISM: "PLAIN" # Valid options are PLAIN, SCRAM-SHA-256, SCRAM-SHA-512. - KAFKA_SASL_USERNAME: "username" - KAFKA_SASL_PASSWORD: "password" + KAFKA_SASL_USERNAME: "" + KAFKA_SASL_PASSWORD: "" ``` If you encounter any failing startup, try to use `consumer` instead of `rust-consumer`. From 6ae0eff1a2143eaaf15d2365058549bf9d2ccb26 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:36:47 +0700 Subject: [PATCH 6/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index a03304ef1a769..3185b5475c882 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -59,7 +59,7 @@ x-snuba-defaults: &snuba_defaults KAFKA_SASL_PASSWORD: "" ``` -If you encounter any failing startup, try to use `consumer` instead of `rust-consumer`. +If you encounter any failures during installation or startup, try to use `consumer` instead of `rust-consumer`. 
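If the cluster requires SASL authentication, it is also worth verifying that the account you configure can create topics, since (as noted above) the user needs to be able to create new topics and there is no support for prefixed topic names. A rough sketch follows; the credentials are the placeholders used in the examples, and the topic name here is just a throwaway value for the check:

```python
# Check that the SASL account can authenticate and create topics.
from confluent_kafka.admin import AdminClient, NewTopic

admin = AdminClient({
    "bootstrap.servers": "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092",
    "security.protocol": "SASL_PLAINTEXT",  # or SASL_SSL, to match your cluster
    "sasl.mechanism": "PLAIN",
    "sasl.username": "username",
    "sasl.password": "password",
})

# Create (then delete) a throwaway topic; result() raises on auth/ACL errors.
name = "sentry-external-kafka-check"
for future in admin.create_topics([NewTopic(name, num_partitions=1, replication_factor=1)]).values():
    future.result(timeout=30)
print("Topic creation works for this account")

for future in admin.delete_topics([name]).values():
    future.result(timeout=30)
```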
## Relay From 5b13dff98a8f25df9d5431ca3b8f85d00813f930 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:36:54 +0700 Subject: [PATCH 7/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index 3185b5475c882..ff003c51b8228 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -63,7 +63,7 @@ If you encounter any failures during installation or startup, try to use `consum ## Relay -Modify your `relay/config.yml` file like so: +Modify your `relay/config.yml` file as: ```yaml processing: From 42ace812a1bfe279e7c245549252cabbdb1b2352 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:37:01 +0700 Subject: [PATCH 8/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index ff003c51b8228..4595930327d5e 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -81,7 +81,7 @@ processing: ## Vroom -As of the time of writing, Vroom does not support any kind of authentication. +At the time of writing, Vroom does not support any kind of authentication. Modify your `docker-compose.yml` file like so: From 43ffb67922603780d39eea24c2d0c6f2a3fa0c38 Mon Sep 17 00:00:00 2001 From: Reinaldy Rafli Date: Wed, 18 Dec 2024 05:37:51 +0700 Subject: [PATCH 9/9] Update external-kafka.mdx Co-authored-by: Burak Yigit Kaya --- develop-docs/self-hosted/experimental/external-kafka.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/develop-docs/self-hosted/experimental/external-kafka.mdx b/develop-docs/self-hosted/experimental/external-kafka.mdx index 4595930327d5e..4995747b86529 100644 --- a/develop-docs/self-hosted/experimental/external-kafka.mdx +++ b/develop-docs/self-hosted/experimental/external-kafka.mdx @@ -110,6 +110,6 @@ vroom: SENTRY_KAFKA_SSL_CERT_PATH: "" SENTRY_KAFKA_SSL_KEY_PATH: "" SENTRY_KAFKA_SASL_MECHANISM: "PLAIN" # Valid options are PLAIN, SCRAM-SHA-256, SCRAM-SHA-512. - SENTRY_KAFKA_SASL_USERNAME: "username" - SENTRY_KAFKA_SASL_PASSWORD: "password" + SENTRY_KAFKA_SASL_USERNAME: "" + SENTRY_KAFKA_SASL_PASSWORD: "" ```
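Once every service above points at the external cluster and `./install.sh` has been re-run, one way to confirm that events are actually flowing through the external brokers is to tail one of the ingest topics. The sketch below assumes a default installation where Relay produces error events to the `ingest-events` topic; adjust the topic, broker list, and any security options to match your setup:

```python
# Tail the ingest-events topic to confirm Relay is producing to the external cluster.
from confluent_kafka import Consumer

consumer = Consumer({
    "bootstrap.servers": "kafka-node1:9092,kafka-node2:9092,kafka-node3:9092",
    "group.id": "external-kafka-smoke-test",  # throwaway consumer group name
    "auto.offset.reset": "latest",
})
consumer.subscribe(["ingest-events"])

try:
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            print("Consumer error:", msg.error())
            continue
        print(f"Received {len(msg.value())} bytes on {msg.topic()}[{msg.partition()}]")
except KeyboardInterrupt:
    pass
finally:
    consumer.close()
```

Send a test event from any SDK and messages should show up on the topic within a few seconds.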