diff --git a/links/public-cloud/analytics-kafka b/links/public-cloud/analytics-kafka new file mode 100644 index 00000000000..e6181fc7e30 --- /dev/null +++ b/links/public-cloud/analytics-kafka @@ -0,0 +1,16 @@ +- [de-de](https://www.ovhcloud.com/de/public-cloud/apache-kafka/) +- [en-asia](https://www.ovhcloud.com/asia/public-cloud/apache-kafka/) +- [en-au](https://www.ovhcloud.com/en-au/public-cloud/apache-kafka/) +- [en-ca](https://www.ovhcloud.com/en-ca/public-cloud/apache-kafka/) +- [en-gb](https://www.ovhcloud.com/en-gb/public-cloud/apache-kafka/) +- [en-ie](https://www.ovhcloud.com/en-ie/public-cloud/apache-kafka/) +- [en-in](https://www.ovhcloud.com/en-in/public-cloud/apache-kafka/) +- [en-sg](https://www.ovhcloud.com/en-sg/public-cloud/apache-kafka/) +- [en-us](https://www.ovhcloud.com/en/public-cloud/apache-kafka/) +- [es-es](https://www.ovhcloud.com/es-es/public-cloud/apache-kafka/) +- [es-us](https://www.ovhcloud.com/es/public-cloud/apache-kafka/) +- [fr-ca](https://www.ovhcloud.com/fr-ca/public-cloud/apache-kafka/) +- [fr-fr](https://www.ovhcloud.com/fr/public-cloud/apache-kafka/) +- [it-it](https://www.ovhcloud.com/it/public-cloud/apache-kafka/) +- [pl-pl](https://www.ovhcloud.com/pl/public-cloud/apache-kafka/) +- [pt-pt](https://www.ovhcloud.com/pt/public-cloud/apache-kafka/) \ No newline at end of file diff --git a/links/public-cloud/prices-kafka b/links/public-cloud/prices-kafka new file mode 100644 index 00000000000..41b175ce6b6 --- /dev/null +++ b/links/public-cloud/prices-kafka @@ -0,0 +1,16 @@ +- [de-de](https://www.ovhcloud.com/de/public-cloud/prices/#7211) +- [en-asia](https://www.ovhcloud.com/asia/public-cloud/prices/#7211) +- [en-au](https://www.ovhcloud.com/en-au/public-cloud/prices/#7211) +- [en-ca](https://www.ovhcloud.com/en-ca/public-cloud/prices/#7211) +- [en-gb](https://www.ovhcloud.com/en-gb/public-cloud/prices/#7211) +- [en-ie](https://www.ovhcloud.com/en-ie/public-cloud/prices/#7211) +- [en-in](https://www.ovhcloud.com/en-in/public-cloud/prices/#7211) +- [en-sg](https://www.ovhcloud.com/en-sg/public-cloud/prices/#7211) +- [en-us](https://www.ovhcloud.com/en/public-cloud/prices/#7211) +- [es-es](https://www.ovhcloud.com/es-es/public-cloud/prices/#7211) +- [es-us](https://www.ovhcloud.com/es/public-cloud/prices/#7211) +- [fr-ca](https://www.ovhcloud.com/fr-ca/public-cloud/prices/#7211) +- [fr-fr](https://www.ovhcloud.com/fr/public-cloud/prices/#7211) +- [it-it](https://www.ovhcloud.com/it/public-cloud/prices/#7211) +- [pl-pl](https://www.ovhcloud.com/pl/public-cloud/prices/#7211) +- [pt-pt](https://www.ovhcloud.com/pt/public-cloud/prices/#7211) \ No newline at end of file diff --git a/pages/index-translations.de.yaml b/pages/index-translations.de.yaml index 9cf516c2484..199d2381bde 100644 --- a/pages/index-translations.de.yaml +++ b/pages/index-translations.de.yaml @@ -154,11 +154,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutorials public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Getting started +public-cloud-data-analytics-kafka-guides: Kafka - User guides public-cloud-data-analytics-kafka-mirrormaker: Kafka 
MirrorMaker public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutorials public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index-translations.es.yaml b/pages/index-translations.es.yaml index 0724fb35bd6..fda137e0f27 100755 --- a/pages/index-translations.es.yaml +++ b/pages/index-translations.es.yaml @@ -154,11 +154,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutorials public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Getting started +public-cloud-data-analytics-kafka-guides: Kafka - User guides public-cloud-data-analytics-kafka-mirrormaker: Kafka MirrorMaker public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutorials public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index-translations.fq.yaml b/pages/index-translations.fq.yaml index 5b35fb03526..0c98e27e2a7 100755 --- a/pages/index-translations.fq.yaml +++ b/pages/index-translations.fq.yaml @@ -154,11 +154,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutoriels public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect -public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Premiers pas +public-cloud-data-analytics-kafka-guides: Kafka - Guides utilisateur public-cloud-data-analytics-kafka-mirrormaker: Kafka MirrorMaker -public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutoriels public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index-translations.fr.yaml b/pages/index-translations.fr.yaml index d6f9763695c..23caccaeba7 100755 --- a/pages/index-translations.fr.yaml +++ b/pages/index-translations.fr.yaml @@ -155,11 +155,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutoriels public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect 
public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Premiers pas +public-cloud-data-analytics-kafka-guides: Kafka - Guides utilisateur public-cloud-data-analytics-kafka-mirrormaker: Kafka MirrorMaker public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutoriels public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index-translations.it.yaml b/pages/index-translations.it.yaml index 03366a579b5..30e59234b5f 100644 --- a/pages/index-translations.it.yaml +++ b/pages/index-translations.it.yaml @@ -154,11 +154,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutorials public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Getting started +public-cloud-data-analytics-kafka-guides: Kafka - User guides public-cloud-data-analytics-kafka-mirrormaker: Kafka MirrorMaker public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutorials public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index-translations.pl.yaml b/pages/index-translations.pl.yaml index e1c8c5af16c..d0d743460f9 100755 --- a/pages/index-translations.pl.yaml +++ b/pages/index-translations.pl.yaml @@ -154,11 +154,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutorials public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Getting started +public-cloud-data-analytics-kafka-guides: Kafka - User guides public-cloud-data-analytics-kafka-mirrormaker: Kafka MirrorMaker public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutorials public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index-translations.pt.yaml b/pages/index-translations.pt.yaml index 5ddfbaa6ee3..5ac43699150 100755 --- a/pages/index-translations.pt.yaml +++ b/pages/index-translations.pt.yaml @@ -154,11 +154,14 @@ public-cloud-data-analytics-grafana: Dashboards public-cloud-data-analytics-grafana-guides: Dashboards - Guides public-cloud-data-analytics-grafana-tutorials: Dashboards - Tutorials public-cloud-data-analytics-kafka: Kafka -public-cloud-data-analytics-kafka-guides: Kafka - Guides +public-cloud-data-analytics-kafka-advanced-guides: 
Kafka - Advanced guides public-cloud-data-analytics-kafka-connect: Kafka Connect public-cloud-data-analytics-kafka-connect-guides: Kafka Connect - Guides +public-cloud-data-analytics-kafka-getting-started: Kafka - Getting started +public-cloud-data-analytics-kafka-guides: Kafka - User guides public-cloud-data-analytics-kafka-mirrormaker: Kafka MirrorMaker public-cloud-data-analytics-kafka-mirrormaker-guides: Kafka MirrorMaker - Guides +public-cloud-data-analytics-kafka-tutorials: Kafka - Tutorials public-cloud-databases-cassandra: Cassandra public-cloud-databases-cassandra-guides: Cassandra - Guides public-cloud-data-analytics-opensearch: OpenSearch diff --git a/pages/index.md b/pages/index.md index 83e63882549..a451977fa15 100644 --- a/pages/index.md +++ b/pages/index.md @@ -1261,11 +1261,20 @@ + [Analytics - How to set up your Kubernetes database operator](public_cloud/data_analytics/analytics/analytics_kubernetes_operator) + [Analytics - How to fetch service metrics with Prometheus](public_cloud/data_analytics/analytics/analytics_metrics_via_prometheus) + [Kafka](products/public-cloud-data-analytics-kafka) - + [Guides](public-cloud-data-analytics-kafka-guides) - + [Kafka - Capabilities and Limitations](public_cloud/public_cloud_databases/kafka_01_capabilities) + + [Getting Started](public-cloud-data-analytics-kafka-getting-started) + [Kafka - Getting started](public_cloud/public_cloud_databases/kafka_02_getting_started) + + [User guides](public-cloud-data-analytics-kafka-guides) + + [Kafka - Capabilities and Limitations](public_cloud/public_cloud_databases/kafka_01_capabilities) + + [Kafka - How to create a Kafka cluster](public_cloud/data_analytics/analytics/kafka_create_cluster) + + [Kafka - How to configure your Kafka cluster to accept incoming connections](public_cloud/data_analytics/analytics/kafka_incoming_connections) + + [Kafka - How to connect to a Kafka cluster with CLI](public_cloud/data_analytics/analytics/kafka_connect_cluster_cli) + + [Kafka - How to create topics for your Kafka cluster](public_cloud/data_analytics/analytics/kafka_create_topics) + + [Kafka - How to use Access Control Lists (ACLs)](public_cloud/data_analytics/analytics/kafka_configure_acl) + + [Kafka - How to enable schema registry](public_cloud/data_analytics/analytics/kafka_enable_schema_registry) + + [Advanced guides](public-cloud-data-analytics-kafka-advanced-guides) + [Kafka - Advanced parameters references](public_cloud/public_cloud_databases/kafka_03_advanced_parameters_references) - + [Kafka - Python 101](public_cloud/public_cloud_databases/kafka_04_dev_python_basics) + + [Tutorials](public-cloud-data-analytics-kafka-tutorials) + + [Kafka - Create publisher and consumer applications](public_cloud/public_cloud_databases/kafka_04_dev_python_basics) + [Kafka Connect](products/public-cloud-data-analytics-kafka-connect) + [Guides](public-cloud-data-analytics-kafka-connect-guides) + [Kafka Connect - Capabilities and Limitations](public_cloud/public_cloud_databases/kafkaconnect_01_capabilities) diff --git a/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/guide.en-gb.md b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/guide.en-gb.md new file mode 100644 index 00000000000..bf9fe2b81d5 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/guide.en-gb.md @@ -0,0 +1,50 @@ +--- +title: Kafka - How to use Access Control Lists (ACLs) +excerpt: Learn how to configure Access Control Lists (ACLs) +updated: 2025-08-25 +--- + +## Objective + +Apache Kafka 
is an open-source, distributed event streaming platform designed for real-time, large-scale data processing with high scalability, durability, and low latency. + +This guide explains how to configure Access Control Lists (ACLs) via the OVHcloud Control Panel. + +## Requirements + +- Access to the [OVHcloud Control Panel](/links/manager) +- A [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +- A [Kafka cluster running](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) on OVHcloud Public Cloud [accepting incoming connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) with at least one [topic](/pages/public_cloud/data_analytics/analytics/kafka_create_topics) + +## Instructions + +### Configure ACLs on topics + +Kafka supports access control lists (ACLs) to manage permissions on topics. This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. + +By default, the admin user has access to all topics with admin privileges. You can define additional ACLs for any user and topic by clicking the `Add an ACL`{.action} button in the `ACL`{.action} tab: + +![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} + +For a given user and a single topic (or all topics with '*'), define the ACL with one of the following permissions: + +- **admin**: full access to APIs and the topic +- **read**: allow only searching and retrieving data from a topic +- **write**: allow updating, adding, and deleting data from a topic +- **readwrite**: full access to the topic + +![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} + +*Note*: The write permission allows the service user to create new topics that match the pattern, but it does not allow deletion of those topics. + +When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. A command-line check of these permissions is sketched at the end of this guide. + +## We want your feedback! + +We would love to help answer questions and appreciate any feedback you may have. + +If you need training or technical assistance to implement our solutions, contact your sales representative or click on [this link](/links/professional-services) to get a quote and ask our Professional Services experts for a custom analysis of your project. + +Are you on Discord? Connect to our channel at and interact directly with the team that builds our Analytics service! + +Join our [community of users](/links/community).
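+
+As a quick way to check that an ACL behaves as expected, you can reuse the `Kcat` client described in the [CLI connection guide](/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli). The commands below are an illustrative sketch only: `kafkacat-readonly.conf` is a hypothetical configuration file (same format as in that guide) pointing at the certificate and access key of a user that was only granted the **read** permission on `my-topic`.
+
+```bash
+# Consuming with the read-only user should succeed
+kcat -F kafkacat-readonly.conf -C -t my-topic -o beginning -e
+
+# Producing with the same user should be rejected with a topic authorization error
+echo "should-fail" | kcat -F kafkacat-readonly.conf -P -t my-topic
+```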
\ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/images/kafka_acl.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/images/kafka_acl.v2.png new file mode 100644 index 00000000000..a76a024e984 Binary files /dev/null and b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/images/kafka_acl.v2.png differ diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_add_entry1.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/images/kafka_add_entry1.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_add_entry1.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_configure_acl/images/kafka_add_entry1.v2.png diff --git a/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/meta.yaml b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/meta.yaml new file mode 100644 index 00000000000..92e62cf9090 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_configure_acl/meta.yaml @@ -0,0 +1,4 @@ +id: 68436db5-0ad3-400c-b1b9-eb137c18f6f7 +full_slug: data-analytics-kafka-configure-acl +engine: kafka +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/guide.en-gb.md b/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/guide.en-gb.md new file mode 100644 index 00000000000..20f83ccb373 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/guide.en-gb.md @@ -0,0 +1,101 @@ +--- +title: Kafka - How to connect to a Kafka cluster with CLI +excerpt: Learn how to connect to a Kafka cluster using the CLI +updated: 2025-08-25 +--- + +## Objective + +Apache Kafka is an open-source, distributed event streaming platform designed for real-time, large-scale data processing with high scalability, durability, and low latency. + +This guide explains how to connect to a Kafka cluster using the CLI. + +## Requirements + +- Access to the [OVHcloud Control Panel](/links/manager) +- A [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +- A [Kafka cluster running](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) on OVHcloud Public Cloud [accepting incoming connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) + +## Instructions + +### First CLI connection + +> [!warning] +> Verify that the public IP address of the machine you are connecting from is part of the "Authorised IPs" defined for this Kafka service. +> +> Also check that the user has been granted ACLs on the target topics. + +#### Download server and user certificates + +To connect to the Apache Kafka service, you need the server CA certificate as well as a user certificate and access key.
+ +##### Server certificate + +The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: + +![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} + +##### User certificate and access key + +The user certificate and the user access key can be downloaded from the `Users`{.action} tab: + +![User information](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} + +#### Install an Apache Kafka CLI + +The official Apache Kafka distribution ships with scripts that let you connect to Kafka from a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). + +We recommend using a generic and more lightweight producer and consumer client instead, which does not require a JVM: `Kcat` (formerly known as `kafkacat`). An example using the official tools is nevertheless provided at the end of this guide. + +##### **Install Kcat** + +To install this client, please follow the instructions available at: [Kcat official GitHub](https://github.com/edenhill/kcat). + +##### **Kcat configuration file** + +Create a configuration file to simplify the producer and consumer CLI commands: + +kafkacat.conf: + +```text +bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 +enable.ssl.certificate.verification=false +ssl.ca.location=/home/user/kafkacat/ca.pem +security.protocol=ssl +ssl.key.location=/home/user/kafkacat/service.key +ssl.certificate.location=/home/user/kafkacat/service.cert +``` + +In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded certificates and access key are in the **/home/user/kafkacat/** folder. + +Change these values according to your own configuration. + +##### **Kafka producer** + +For this first example, let's push the key "test-message-key" with the value "test-message-content" to the "my-topic" topic. + +```bash +echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key +``` + +*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. + +##### **Kafka consumer** + +The data can then be retrieved from "my-topic": + +```bash +kcat -F kafkacat.conf -C -t my-topic -o -1 -e +``` + +*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. + +## We want your feedback! + +We would love to help answer questions and appreciate any feedback you may have. + +If you need training or technical assistance to implement our solutions, contact your sales representative or click on [this link](/links/professional-services) to get a quote and ask our Professional Services experts for a custom analysis of your project. + +Are you on Discord? Connect to our channel at and interact directly with the team that builds our Analytics service! + +Join our [community of users](/links/community).
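+
+If you prefer the official Apache Kafka command-line tools (which require a JVM), the PEM files downloaded above must first be converted into a keystore and a truststore. The following is a minimal sketch: the file names, the `changeit` password and the bootstrap address are placeholders to adapt to your own service.
+
+```bash
+# Build a PKCS12 keystore from the user certificate and key, and a truststore containing the CA
+openssl pkcs12 -export -inkey service.key -in service.cert \
+  -out client.keystore.p12 -name service_key -passout pass:changeit
+keytool -importcert -alias ovhcloud-kafka-ca -file ca.pem \
+  -keystore client.truststore.jks -storepass changeit -noprompt
+```
+
+Reference both stores from a `client.properties` file:
+
+```text
+security.protocol=SSL
+ssl.keystore.type=PKCS12
+ssl.keystore.location=client.keystore.p12
+ssl.keystore.password=changeit
+ssl.truststore.location=client.truststore.jks
+ssl.truststore.password=changeit
+```
+
+The stock tools can then be used with this configuration, for example:
+
+```bash
+kafka-console-producer.sh --bootstrap-server kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 --topic my-topic --producer.config client.properties
+```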
\ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_get_server_certificate.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/images/kafka_get_server_certificate.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_get_server_certificate.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/images/kafka_get_server_certificate.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_user_certificate_and_access_key.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/images/kafka_user_certificate_and_access_key.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_user_certificate_and_access_key.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/images/kafka_user_certificate_and_access_key.v2.png diff --git a/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/meta.yaml b/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/meta.yaml new file mode 100644 index 00000000000..5541932f89a --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli/meta.yaml @@ -0,0 +1,4 @@ +id: 39f9dc6c-0985-4ab5-b379-1f8053f073c0 +full_slug: data-analytics-kafka-connect-cli +engine: kafka +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/guide.en-gb.md b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/guide.en-gb.md new file mode 100644 index 00000000000..588417e3430 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/guide.en-gb.md @@ -0,0 +1,93 @@ +--- +title: Kafka - How to create a Kafka cluster +excerpt: Learn how to create a Kafka cluster +updated: 2025-08-25 +--- + +## Objective + +Apache Kafka is an open-source, distributed event streaming platform designed for real-time, large-scale data processing with high scalability, durability, and low latency. + +This guide explains how to create a Kafka cluster via the OVHcloud Control Panel. + +## Requirements + +- Access to the [OVHcloud Control Panel](/links/manager) +- A [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account + +## Instructions + +### Subscribe to the service + +Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. + +Click the `Create a service`{.action} button. + +#### Select your analytics service + +Click on the type of analytics service you want to use and its version. +A random name is generated for your service that can change in this step or later. + +![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} + +#### Select a datacentre + +Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). + +![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} + +#### Select a plan + +In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. 
+ +![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} + +Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. + +#### Select the instance + +Choose the instance type for the nodes of your service; you will be able to change it afterwards. The number of nodes depends on the plan previously chosen. + +![Choose instance](images/kafka_features.v2.png){.thumbnail} + +#### Select the storage + +Storage can be scaled up to 3 times the base storage. + +![Choose storage](images/kafka_storage.v2.png){.thumbnail} + +#### Configure your options + +Choose the network options for your service and whitelist the IP addresses that will access the service. + +![Configure options](images/kafka_options.v2.png){.thumbnail} + +#### Review and confirm + +A summary of your order is displayed to help you review your service configuration. + +![Review order](images/kafka_configuration.1.v2.png){.thumbnail} + +The price components are also summarized, together with a monthly estimate. + +![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} + +Click the `API and Terraform equivalent`{.action} button to open the following window: + +![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} + +The information displayed in this window can help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider (a Terraform sketch is provided at the end of this guide). + +When you are ready, click the `Order`{.action} button to create your service. +In a matter of minutes, your new Apache Kafka service will be deployed. +Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. + +## We want your feedback! + +We would love to help answer questions and appreciate any feedback you may have. + +If you need training or technical assistance to implement our solutions, contact your sales representative or click on [this link](/links/professional-services) to get a quote and ask our Professional Services experts for a custom analysis of your project. + +Are you on Discord? Connect to our channel at and interact directly with the team that builds our Analytics service! + +Join our [community of users](/links/community).
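+
+If you prefer infrastructure as code, the same service can also be described with the OVHcloud Terraform Provider. The snippet below is a sketch based on the `ovh_cloud_project_database` resource: the project ID, region, version, plan and flavor values are placeholders, so check the `API and Terraform equivalent`{.action} window and the provider documentation for the values supported by your project.
+
+```hcl
+resource "ovh_cloud_project_database" "kafka" {
+  service_name = "<public_cloud_project_id>" # your Public Cloud project ID
+  description  = "my-kafka-cluster"
+  engine       = "kafka"
+  version      = "3.8"      # pick a currently supported Kafka version
+  plan         = "business"
+  flavor       = "db1-4"
+
+  # one nodes block per node; the business plan runs three nodes
+  nodes {
+    region = "GRA"
+  }
+  nodes {
+    region = "GRA"
+  }
+  nodes {
+    region = "GRA"
+  }
+}
+```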
\ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_configuration.1.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_configuration.1.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_configuration.1.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_configuration.1.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_configuration.2.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_configuration.2.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_configuration.2.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_configuration.2.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_configuration.3.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_configuration.3.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_configuration.3.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_configuration.3.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_db_type.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_db_type.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_db_type.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_db_type.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_features.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_features.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_features.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_features.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_options.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_options.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_options.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_options.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_region.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_region.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_region.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_region.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_solution_business.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_solution_business.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_solution_business.v2.png rename to 
pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_solution_business.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_storage.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_storage.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_storage.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_cluster/images/kafka_storage.v2.png diff --git a/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/meta.yaml b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/meta.yaml new file mode 100644 index 00000000000..789fc27a8ba --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_create_cluster/meta.yaml @@ -0,0 +1,4 @@ +id: efa694d0-1da0-42b5-ab84-447731de60c9 +full_slug: data-analytics-kafka-create-cluster +engine: kafka +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_create_topics/guide.en-gb.md b/pages/public_cloud/data_analytics/analytics/kafka_create_topics/guide.en-gb.md new file mode 100644 index 00000000000..9839cceb3bb --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_create_topics/guide.en-gb.md @@ -0,0 +1,48 @@ +--- +title: Kafka - How to create topics for your Kafka cluster +excerpt: Learn how to create Kafka topics +updated: 2025-08-25 +--- + +## Objective + +Apache Kafka is an open-source, distributed event streaming platform designed for real-time, large-scale data processing with high scalability, durability, and low latency. + +This guide explains how to create Kafka topics via the OVHcloud Control Panel. + +## Requirements + +- Access to the [OVHcloud Control Panel](/links/manager) +- A [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +- A [Kafka cluster running](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) on OVHcloud Public Cloud [accepting incoming connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) + +## Instructions + +### Create Kafka topics + +Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. + +To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: + +![Add a topic](images/kafka_topics.v2.png){.thumbnail} + +In advanced configuration you can change the default value for the following parameters: + +- Minimum in-sync replica (2 by default) +- Partitions (1 partition by default) +- Replication (3 brokers by default) +- Retention size in bytes (-1: no limitation by default) +- Retention time in hours (-1: no limitation by default) +- Deletion policy + +![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} + +## We want your feedback! + +We would love to help answer questions and appreciate any feedback you may have. + +If you need training or technical assistance to implement our solutions, contact your sales representative or click on [this link](/links/professional-services) to get a quote and ask our Professional Services experts for a custom analysis of your project. + +Are you on Discord? Connect to our channel at and interact directly with the team that builds our Analytics service! + +Join our [community of users](/links/community). 
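+
+Topics can also be created from the command line with the tools shipped in the official Apache Kafka distribution, provided your user holds the **admin** permission and topic creation from clients is allowed on your service. The command below is a sketch: the bootstrap address, topic name and settings are placeholders, and `client.properties` is an SSL client configuration for the official tools such as the one described in the [CLI connection guide](/pages/public_cloud/data_analytics/analytics/kafka_connect_cluster_cli). If topic creation from clients is restricted, use the Control Panel or the OVHcloud API instead.
+
+```bash
+kafka-topics.sh --bootstrap-server kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 \
+  --command-config client.properties \
+  --create --topic my-topic \
+  --partitions 3 --replication-factor 3 \
+  --config retention.ms=604800000 --config min.insync.replicas=2
+```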
\ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_create_topic.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_topics/images/kafka_create_topic.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_create_topic.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_topics/images/kafka_create_topic.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_topics.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_create_topics/images/kafka_topics.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_topics.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_create_topics/images/kafka_topics.v2.png diff --git a/pages/public_cloud/data_analytics/analytics/kafka_create_topics/meta.yaml b/pages/public_cloud/data_analytics/analytics/kafka_create_topics/meta.yaml new file mode 100644 index 00000000000..6c9f0c901f0 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_create_topics/meta.yaml @@ -0,0 +1,4 @@ +id: 1897fd7e-0104-4e9e-b05b-59f71630f5ea +full_slug: data-analytics-kafka-create-topics +engine: kafka +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/guide.en-gb.md b/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/guide.en-gb.md new file mode 100644 index 00000000000..cd52c779cd9 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/guide.en-gb.md @@ -0,0 +1,37 @@ +--- +title: Kafka - How to enable schema registry +excerpt: Learn how to enable the schema registry for your Kafka cluster +updated: 2025-08-25 +--- + +## Objective + +Apache Kafka is an open-source, distributed event streaming platform designed for real-time, large-scale data processing with high scalability, durability, and low latency. + +This guide explains how to enable the schema registry for your Kafka cluster via the OVHcloud Control Panel. + +## Requirements + +- Access to the [OVHcloud Control Panel](/links/manager) +- A [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +- A [Kafka cluster running](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) on OVHcloud Public Cloud + +## Instructions + +### Enable schema registry + +To enable schema registry, go to `Configuration`{.action} tab from your Kafka cluster. Then scroll to the Kafka specific section and click the schema registry toggle to enable it. + +![Enable schema registry](images/kafka_enable_schema_registry.png){.thumbnail} + +The schema registry could be disabled with the same toggle. + +## We want your feedback! + +We would love to help answer questions and appreciate any feedback you may have. + +If you need training or technical assistance to implement our solutions, contact your sales representative or click on [this link](/links/professional-services) to get a quote and ask our Professional Services experts for a custom analysis of your project. + +Are you on Discord? Connect to our channel at and interact directly with the team that builds our Analytics service! + +Join our [community of users](/links/community). 
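+
+Once the schema registry is enabled, it exposes a Confluent-compatible REST API. The calls below are a sketch: the host, port, user and password are placeholders, and the actual schema registry connection details should be available from your service's connection information in the Control Panel.
+
+```bash
+# List the subjects that already have a registered schema
+curl -u <user>:<password> "https://<schema-registry-host>:<port>/subjects"
+
+# Register an Avro schema for the values of the "my-topic" topic
+curl -u <user>:<password> \
+  -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
+  --data '{"schema": "{\"type\": \"record\", \"name\": \"Example\", \"fields\": [{\"name\": \"id\", \"type\": \"int\"}]}"}' \
+  "https://<schema-registry-host>:<port>/subjects/my-topic-value/versions"
+```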
\ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/images/kafka_enable_schema_registry.png b/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/images/kafka_enable_schema_registry.png new file mode 100644 index 00000000000..ed8cb438058 Binary files /dev/null and b/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/images/kafka_enable_schema_registry.png differ diff --git a/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/meta.yaml b/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/meta.yaml new file mode 100644 index 00000000000..772ba71c0eb --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_enable_schema_registry/meta.yaml @@ -0,0 +1,4 @@ +id: 57dd04be-cdc2-46db-b43f-8587e2afa1f0 +full_slug: data-analytics-kafka-enable-schema-registry +engine: kafka +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/guide.en-gb.md b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/guide.en-gb.md new file mode 100644 index 00000000000..2db35a2a1c5 --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/guide.en-gb.md @@ -0,0 +1,70 @@ +--- +title: Kafka - How to configure your Kafka cluster to accept incoming connections +excerpt: Learn how to configure your Kafka cluster to accept incoming connections +updated: 2025-08-25 +--- + +## Objective + +Apache Kafka is an open-source, distributed event streaming platform designed for real-time, large-scale data processing with high scalability, durability, and low latency. + +This guide explains how to configure your Kafka cluster to accept incoming connections via the OVHcloud Control Panel. + +## Requirements + +- Access to the [OVHcloud Control Panel](/links/manager) +- A [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +- A [Kafka cluster running](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) on OVHcloud Public Cloud + +## Instructions + +### Configure the Apache Kafka service + +Once your Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). + +The `Dashboard`{.action} tab automatically updates when your service is ready. + +![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} + +#### Set up a user + +Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. + +![Users](images/kafka_users.v2.png){.thumbnail} + +You can add more users by clicking the `Add user`{.action} button. + +![Add a user](images/kafka_add_user.v2.png){.thumbnail} + +Enter a username, then click `Create User`{.action}. + +Passwords need to be reset from the `Users`{.action} table. + +![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} + +#### Configure authorised IPs + +> [!warning] +> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. + +If you did not define the authorised IPs during the order you can do it in the `Configuration`{.action} tab. 
At least one IP address must be authorised here before you can connect to your Kafka service. + +![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} + +Add the IP address of your computer by using the `Current IP`{.action} button. +You will be able to remove IPs from the table afterward. + +![Add IP](images/kafka_add_ip.v2.png){.thumbnail} + +Your Apache Kafka service is now fully accessible! +Optionally, you can [configure access control lists (ACLs)](/pages/public_cloud/data_analytics/analytics/kafka_configure_acl) for granular permissions and [create topics](/pages/public_cloud/data_analytics/analytics/kafka_create_topics), as described in the dedicated guides. + +## We want your feedback! + +We would love to help answer questions and appreciate any feedback you may have. + +If you need training or technical assistance to implement our solutions, contact your sales representative or click on [this link](/links/professional-services) to get a quote and ask our Professional Services experts for a custom analysis of your project. + +Are you on Discord? Connect to our channel at and interact directly with the team that builds our Analytics service! + +Join our [community of users](/links/community). \ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_add_ip.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_add_ip.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_add_ip.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_add_ip.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_add_user.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_add_user.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_add_user.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_add_user.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_authorized_ip.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_authorized_ip.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_authorized_ip.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_authorized_ip.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_cluster_ready_to_configure.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_cluster_ready_to_configure.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_cluster_ready_to_configure.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_cluster_ready_to_configure.v2.png diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_user_password_reset1.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_user_password_reset1.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_user_password_reset1.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_user_password_reset1.v2.png diff --git
a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_users.v2.png b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_users.v2.png similarity index 100% rename from pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_users.v2.png rename to pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/images/kafka_users.v2.png diff --git a/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/meta.yaml b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/meta.yaml new file mode 100644 index 00000000000..c136e2b435c --- /dev/null +++ b/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections/meta.yaml @@ -0,0 +1,4 @@ +id: d5203141-898d-4cad-b8a7-837b25e34d57 +full_slug: data-analytics-kafka-incoming-connections +engine: kafka +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.de-de.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.de-de.md index ccbff3eabfe..689beb60a4a 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.de-de.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.de-de.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-asia.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-asia.md index c47c254ed48..805198a6f16 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-asia.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-asia.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-au.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-au.md index 0e344783e79..63cdc4a20df 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-au.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-au.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- 
`EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ca.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ca.md index 99f3d66b782..2e1fc821b15 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ca.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ca.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-gb.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-gb.md index e379419498b..48251f34bd0 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-gb.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-gb.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ie.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ie.md index c2e4560e16c..6cfd1e81279 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ie.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-ie.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-sg.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-sg.md index 84c78fb4950..559f29b0e30 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-sg.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-sg.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` 
(Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-us.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-us.md index 926393d3dd8..557ad875223 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-us.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.en-us.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-es.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-es.md index 04af1844fd5..45ba42330ff 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-es.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-es.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-us.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-us.md index 2778470acc6..87d7b160169 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-us.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.es-us.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-ca.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-ca.md index 0fd32d96741..3386d1c88c7 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-ca.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-ca.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capacités et limitations (EN) excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - 
`UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-fr.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-fr.md index 5f4aa007443..f60396b999b 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-fr.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.fr-fr.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capacités et limitations (EN) excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.it-it.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.it-it.md index 9e46c692cb4..c45222b80b3 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.it-it.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.it-it.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pl-pl.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pl-pl.md index d58eb1bd782..d591a05e862 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pl-pl.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pl-pl.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pt-pt.md b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pt-pt.md index a4dbf62bad0..0ea13ded205 100644 --- a/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pt-pt.md +++ b/pages/public_cloud/public_cloud_databases/cassandra_01_capabilities/guide.pt-pt.md @@ -1,7 +1,7 @@ --- title: Cassandra - Capabilities and Limitations excerpt: Discover the capabilities and limitations of Public Cloud Databases for Cassandra -updated: 2025-07-31 +updated: 2025-08-25 --- > [!warning] @@ -30,7 +30,6 @@ The Public Cloud Databases offer is available in the following regions: - `GRA` (Gravelines, France) - `SBG` (Strasbourg, France) - `SGP` (Singapore, Singapore) -- `EU-WEST-PAR` (Paris, France) - `UK` (London, 
United Kingdom) - `WAW` (Warsaw, Poland) diff --git a/pages/public_cloud/public_cloud_databases/kafka_01_capabilities/meta.yaml b/pages/public_cloud/public_cloud_databases/kafka_01_capabilities/meta.yaml index 24ca4d2c4d4..7f868f13cd6 100755 --- a/pages/public_cloud/public_cloud_databases/kafka_01_capabilities/meta.yaml +++ b/pages/public_cloud/public_cloud_databases/kafka_01_capabilities/meta.yaml @@ -2,4 +2,4 @@ id: 00a50392-e50f-4fd5-92f0-d537de0b3d9b full_slug: public-cloud-databases-kafka-capabilities engine: kafka section: dashboard -reference_category: public-cloud-databases-kafka-guides \ No newline at end of file +reference_category: public-cloud-data-analytics-kafka-guides \ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.de-de.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.de-de.md index 45123c53fb2..afa5fbf8ae1 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.de-de.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.de-de.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
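The introduction above mentions that a cluster can be provisioned through the OVHcloud Control Panel, the API or Terraform. As an illustration, here is a minimal Terraform sketch; it assumes the `ovh_cloud_project_database` resource from the OVHcloud Terraform provider, and the project ID, Kafka version, flavor and region values are placeholders to replace with ones valid for your own project.

```hcl
terraform {
  required_providers {
    ovh = {
      source = "ovh/ovh"
    }
  }
}

# Illustrative sketch: a 3-node Kafka cluster on a business plan.
# service_name is your Public Cloud project ID; version, flavor and
# region are placeholder values to adapt to your own project.
resource "ovh_cloud_project_database" "kafka" {
  service_name = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  description  = "my-kafka-cluster"
  engine       = "kafka"
  version      = "3.8"
  plan         = "business"
  flavor       = "db1-4"

  nodes {
    region = "GRA"
  }
  nodes {
    region = "GRA"
  }
  nodes {
    region = "GRA"
  }
}
```

The same parameters (region, plan, node flavor and storage) appear in the Control Panel workflow described in the cluster creation guide linked below.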
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-asia.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-asia.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-asia.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-asia.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
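As a complement to the incoming-connections guide referenced in this section, the authorised-IP step can also be sketched in Terraform. This assumes the `ovh_cloud_project_database_ip_restriction` resource from the OVHcloud Terraform provider; the project ID and client address are placeholders.

```hcl
# Illustrative sketch: authorise one client IP on an existing Kafka cluster.
# Without at least one authorised address, incoming connections are refused.
resource "ovh_cloud_project_database_ip_restriction" "workstation" {
  service_name = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"  # Public Cloud project ID
  engine       = "kafka"
  cluster_id   = ovh_cloud_project_database.kafka.id # cluster created earlier
  ip           = "203.0.113.10/32"                   # client address to authorise
  description  = "workstation"
}
```

The equivalent action in the Control Panel is the authorised-IPs step covered in the guide linked above.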
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-au.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-au.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-au.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-au.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ca.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ca.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ca.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ca.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-gb.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-gb.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-gb.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-gb.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
+### Pricing -## Requirements -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools, including plans for Kafka, at: [Kafka pricing](/links/public-cloud/prices-kafka) -## Instructions +### Capabilities -### Subscribe to the service +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more, on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +### Create a cluster -Click the `Create a service`{.action} button. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, covering datacentre selection, plan, instance configuration, storage sizing, and cluster setup options, at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -#### Select your analytics service +### Configure cluster for incoming connections - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service; you can change it in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service; you will be able to change it afterwards. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage can be scaled up to 3 times the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is displayed to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The price components are also summarized, along with a monthly estimate.
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The information displayed in this window can help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready, click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user named `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons, the default network configuration doesn't allow any incoming connections. It is thus critical to authorise the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order, you can do so in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterwards. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACLs) for granular permissions and create topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab, then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In the advanced configuration, you can change the default values for the following parameters: - -- Minimum in-sync replicas (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics.
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ie.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ie.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ie.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-ie.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
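As a programmatic counterpart to the `kcat` consumer command shown above, here is a minimal sketch assuming the `kafka-python` package; the host, port and certificate paths are the same placeholder values as in the kcat configuration file, so adjust them to your own service.

```python
# Minimal consumer sketch using kafka-python (pip install kafka-python).
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    "my-topic",
    bootstrap_servers="kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186",
    security_protocol="SSL",
    ssl_cafile="/home/user/kafkacat/ca.pem",
    ssl_certfile="/home/user/kafkacat/service.cert",
    ssl_keyfile="/home/user/kafkacat/service.key",
    auto_offset_reset="earliest",  # start from the beginning of the topic
    consumer_timeout_ms=10000,     # stop iterating after 10 s without new messages
)

for message in consumer:
    print(message.key, message.value)

consumer.close()
```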
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-sg.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-sg.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-sg.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-sg.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
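The topic parameters listed earlier (partitions, replication, minimum in-sync replicas, retention, deletion policy) can also be set programmatically through Kafka's admin API, provided the connecting user has sufficient privileges. Below is a minimal sketch assuming the `kafka-python` admin client and the same placeholder connection values as the kcat example; the defaults mirror the values shown in the Control Panel.

```python
# Sketch: create a topic with the same defaults as the Control Panel
# (1 partition, replication factor 3, min.insync.replicas 2, no retention limit).
from kafka.admin import KafkaAdminClient, NewTopic

admin = KafkaAdminClient(
    bootstrap_servers="kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186",
    security_protocol="SSL",
    ssl_cafile="/home/user/kafkacat/ca.pem",
    ssl_certfile="/home/user/kafkacat/service.cert",
    ssl_keyfile="/home/user/kafkacat/service.key",
)

topic = NewTopic(
    name="my-other-topic",
    num_partitions=1,
    replication_factor=3,
    topic_configs={
        "min.insync.replicas": "2",
        "retention.ms": "-1",     # no time-based retention limit
        "retention.bytes": "-1",  # no size-based retention limit
        "cleanup.policy": "delete",
    },
)

admin.create_topics([topic])
admin.close()
```

Depending on your service configuration and ACLs, client-side topic creation may be rejected; in that case, create the topic from the `Topics`{.action} tab instead.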
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-us.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-us.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-us.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.en-us.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
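The guide also mentions that service creation and management can be automated with the OVHcloud API. As an illustration only, here is a hedged sketch using the official `ovh` Python wrapper; the exact route (`/cloud/project/{serviceName}/database/kafka`), the credential names and the region endpoint are assumptions to verify in the API console before relying on them.

```python
# Sketch: list the Kafka services of a Public Cloud project through the OVHcloud API.
# Requires the official wrapper: pip install ovh
# Application/consumer keys must be created beforehand (see the API first-steps guide).
import ovh

client = ovh.Client(
    endpoint="ovh-eu",                # assumption: adjust to your API region
    application_key="YOUR_APP_KEY",
    application_secret="YOUR_APP_SECRET",
    consumer_key="YOUR_CONSUMER_KEY",
)

project_id = "YOUR_PUBLIC_CLOUD_PROJECT_ID"

# Assumed route; check it in the API console before use.
cluster_ids = client.get(f"/cloud/project/{project_id}/database/kafka")

for cluster_id in cluster_ids:
    details = client.get(f"/cloud/project/{project_id}/database/kafka/{cluster_id}")
    print(cluster_id, details.get("status"))
```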
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-es.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-es.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-es.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-es.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
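The default topic settings described earlier (replication across 3 brokers, minimum in-sync replicas of 2) matter most when producers request full acknowledgement. The sketch below, again assuming the `kafka-python` package and the placeholder connection values from the kcat example, shows how a producer can ask for that durability guarantee.

```python
# Sketch: with replication factor 3 and min.insync.replicas=2, acks="all" makes the
# broker confirm a write only once at least 2 in-sync replicas have stored it.
from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers="kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186",
    security_protocol="SSL",
    ssl_cafile="/home/user/kafkacat/ca.pem",
    ssl_certfile="/home/user/kafkacat/service.cert",
    ssl_keyfile="/home/user/kafkacat/service.key",
    acks="all",  # wait for the in-sync replica quorum
    retries=5,   # retry transient broker errors
)

future = producer.send("my-topic", key=b"order-42", value=b"durable write")
record_metadata = future.get(timeout=30)  # raises if the write was not acknowledged
print(record_metadata.topic, record_metadata.partition, record_metadata.offset)

producer.flush()
producer.close()
```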
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-us.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-us.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-us.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.es-us.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
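To complement the producer sketch earlier, here is the matching consumer side, again a minimal sketch under the same assumptions (the `confluent-kafka` package, placeholder cluster address and certificate paths); the consumer group name `getting-started-demo` is hypothetical.

```python
# Illustrative sketch only: same placeholders as the producer example above.
from confluent_kafka import Consumer

conf = {
    "bootstrap.servers": "kafka-xxxxxxxx-xxxxxxxx.database.cloud.ovh.net:20186",
    "security.protocol": "SSL",
    "ssl.ca.location": "/home/user/kafka/ca.pem",
    "ssl.certificate.location": "/home/user/kafka/service.cert",
    "ssl.key.location": "/home/user/kafka/service.key",
    # Consumer group and offset behaviour (start from the beginning of the topic)
    "group.id": "getting-started-demo",
    "auto.offset.reset": "earliest",
}

consumer = Consumer(conf)
consumer.subscribe(["my-topic"])

try:
    while True:
        msg = consumer.poll(1.0)  # wait up to 1 second for a record
        if msg is None:
            continue
        if msg.error():
            print(f"Consumer error: {msg.error()}")
            continue
        print(f"key={msg.key()} value={msg.value()}")
finally:
    consumer.close()
```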
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-ca.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-ca.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-ca.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-ca.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
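The retired guide content above lists the Control Panel defaults for a new topic (1 partition, replication across 3 brokers, minimum in-sync replicas of 2, unlimited retention, delete policy). Purely to illustrate what those defaults correspond to in the Kafka Admin API, the sketch below creates an equivalent topic programmatically; it reuses the same placeholder connection details, and whether topic creation is permitted this way depends on your service settings and the user's ACLs.

```python
# Illustrative sketch only: placeholder connection details; topic creation may be restricted.
from confluent_kafka.admin import AdminClient, NewTopic

conf = {
    "bootstrap.servers": "kafka-xxxxxxxx-xxxxxxxx.database.cloud.ovh.net:20186",
    "security.protocol": "SSL",
    "ssl.ca.location": "/home/user/kafka/ca.pem",
    "ssl.certificate.location": "/home/user/kafka/service.cert",
    "ssl.key.location": "/home/user/kafka/service.key",
}

admin = AdminClient(conf)

# Mirror the Control Panel defaults listed in the guide: 1 partition, replication factor 3,
# min.insync.replicas 2, unlimited retention (-1) and a "delete" cleanup policy.
topic = NewTopic(
    "my-topic",
    num_partitions=1,
    replication_factor=3,
    config={
        "min.insync.replicas": "2",
        "retention.ms": "-1",
        "retention.bytes": "-1",
        "cleanup.policy": "delete",
    },
)

for name, future in admin.create_topics([topic]).items():
    try:
        future.result()  # raises if the broker rejected the request
        print(f"Topic {name} created")
    except Exception as exc:
        print(f"Topic {name} could not be created: {exc}")
```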
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-fr.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-fr.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-fr.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.fr-fr.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.it-it.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.it-it.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.it-it.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.it-it.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pl-pl.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pl-pl.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pl-pl.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pl-pl.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
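Before producing or consuming, it can be worth confirming that the authorised IP and the certificates are accepted by the cluster. As a minimal check, assuming the `kafkacat.conf` file shown in the CLI section, Kcat can list the cluster metadata; a successful call returns the brokers, topics and partitions visible to that connection.

```bash
# List brokers, topics and partitions visible through the configured connection.
# A TLS or authorised-IP problem typically surfaces here as a connection error.
kcat -F kafkacat.conf -L
```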
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
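When choosing the storage size, a rough rule of thumb is that every partition replica consumes disk up to its retention limit. The sketch below is only an illustrative back-of-envelope calculation with made-up figures, not an OVHcloud sizing rule.

```bash
#!/usr/bin/env bash
# Illustrative estimate of the disk footprint of one topic across the cluster.
# All figures are assumptions chosen for the example.
PARTITIONS=6
REPLICATION_FACTOR=3
RETENTION_BYTES=$((5 * 1024 * 1024 * 1024))   # 5 GiB retained per partition

TOTAL=$((PARTITIONS * REPLICATION_FACTOR * RETENTION_BYTES))
echo "Approximate disk footprint: $((TOTAL / 1024 / 1024 / 1024)) GiB across all brokers"
```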
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
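The `API and Terraform equivalent`{.action} window mentioned above can also be reproduced by calling the OVHcloud API directly. The sketch below illustrates the general request-signing scheme of the OVHcloud API with `curl`; the credentials are placeholders, and the `/cloud/project/{serviceName}/database/kafka` route used here is an assumption for illustration, so check the API console for the exact path of the call you need.

```bash
#!/usr/bin/env bash
# Hypothetical sketch: signed GET request against the OVHcloud API with curl.
# AK/AS/CK are placeholder credentials; the route below is an assumption.
AK="your_application_key"
AS="your_application_secret"
CK="your_consumer_key"
PROJECT_ID="your_public_cloud_project_id"

METHOD="GET"
URL="https://eu.api.ovh.com/1.0/cloud/project/${PROJECT_ID}/database/kafka"
BODY=""

# Requests are signed with "$1$" + SHA-1 of secret+consumer key+method+URL+body+timestamp.
TS="$(curl -s https://eu.api.ovh.com/1.0/auth/time)"
SIG='$1$'"$(printf '%s' "${AS}+${CK}+${METHOD}+${URL}+${BODY}+${TS}" | sha1sum | cut -d' ' -f1)"

curl -s -X "${METHOD}" "${URL}" \
  -H "X-Ovh-Application: ${AK}" \
  -H "X-Ovh-Consumer: ${CK}" \
  -H "X-Ovh-Timestamp: ${TS}" \
  -H "X-Ovh-Signature: ${SIG}"
```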
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pt-pt.md b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pt-pt.md index 45123c53fb2..efe7b316b67 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pt-pt.md +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/guide.pt-pt.md @@ -1,256 +1,35 @@ --- title: Kafka - Getting started -excerpt: Find out how to set up and manage your Kafka service -updated: 2025-08-04 +excerpt: Discover Kafka on OVHcloud Public Cloud +updated: 2025-08-25 --- -## Objectives +Kafka on OVHcloud is a fully managed, distributed data-streaming platform available through OVHcloud’s Public Cloud. Built on the open-source strength of Apache Kafka, it’s crafted for event-driven applications, near-real-time data transfer, streaming analytics, and high-throughput pipelines. Your data is stored in organized topics and partitions, enabling OVHcloud to handle multiple simultaneous data flows with scalability and resilience. With intuitive management via the OVHcloud Control Panel (or API/Terraform), you can provision, configure, and deploy Kafka clusters in minutes, offloading infrastructure management and keeping focus squarely on your data-driven use cases. -Apache Kafka is an open-source and highly resilient event streaming platform based on 3 main capabilities: +## Most viewed resources -- write or read data to/from stream events; -- store streams of events; -- process streams of events. +### Product page -You can get more information on Kafka from the [official Kafka website](https://kafka.apache.org/intro){.external}. +Discover how OVHcloud's Managed Apache Kafka® service lets you deploy and manage full Apache Kafka clusters within the Public Cloud complete with scalable topics and partitions, integrated tools like Schema Registry and high availability, plus support for Terraform, private networking, and competitive pay-as-you-go pricing at: [Kafka product page](/links/public-cloud/analytics-kafka) -This guide explains how to successfully configure Public Cloud Databases for Kafka via the OVHcloud Control Panel. 
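Once `ca.pem`, `service.cert` and `service.key` have been downloaded, it can save time to check that they match before troubleshooting client errors. Below is a minimal sketch with `openssl`, assuming the files sit in the current directory; the keystore conversion at the end is only needed if you prefer the official Java-based Kafka tools, and the passwords are placeholders.

```bash
# Check that the user certificate chains up to the downloaded CA and is not expired.
openssl verify -CAfile ca.pem service.cert
openssl x509 -in service.cert -noout -subject -enddate

# Optional: build a PKCS#12 keystore and a JKS truststore for the Java-based Kafka CLI.
openssl pkcs12 -export -inkey service.key -in service.cert \
  -out client.keystore.p12 -name kafka-user -passout pass:changeit
keytool -importcert -noprompt -file ca.pem -alias kafka-ca \
  -keystore client.truststore.jks -storepass changeit
```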
+### Pricing -## Requirements +Explore a comprehensive breakdown of OVHcloud’s Public Cloud pricing, where you’ll find detailed hourly and monthly rates across services like virtual instances, managed databases, streaming and analytics tools—including plans for Kafka at: [Kafka pricing](/links/public-cloud/prices-kafka) -- access to the [OVHcloud Control Panel](/links/manager) -- a [Public Cloud project](/links/public-cloud/public-cloud) in your OVHcloud account +### Capabilities -## Instructions +Discover the technical capabilities and limitations of OVHcloud's managed Apache Kafka with detailed guidance on supported versions, resource limits, networking, logging, topics, users, and much more—on this support page: [Kafka capabilities and limitations](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) -### Subscribe to the service +### Create a cluster -Log in to your [OVHcloud Control Panel](/links/manager) and switch to `Public Cloud`{.action} in the top navigation bar. After selecting your Public Cloud project, click on `Data Streaming`{.action} in the left-hand navigation bar under **Databases & Analytics**. +Explore step-by-step instructions on how to create a managed Kafka database cluster on OVHcloud Public Cloud via the Control Panel, datacentre selection, plan, instance configuration, storage sizing, and cluster setup options at: [Create a Kafka cluster](/pages/public_cloud/data_analytics/analytics/kafka_create_cluster) -Click the `Create a service`{.action} button. +### Configure cluster for incoming connections -#### Select your analytics service - -Click on the type of analytics service you want to use and its version. -A random name is generated for your service that can change in this step or later. - -![Choose data streaming service](images/kafka_db_type.v2.png){.thumbnail} - -#### Select a datacentre - -Choose the geographical region of the datacentre where your service will be hosted and the deployment mode (1-AZ vs 3-AZ). - -![Choose a datacentre](images/kafka_region.v2.png){.thumbnail} - -#### Select a plan - -In this step, choose an appropriate service plan. If needed, you will be able to upgrade or downgrade the plan after creation. - -![Choose plan](images/kafka_solution_business.v2.png){.thumbnail} - -Please visit the [capabilities page](/products/public-cloud-data-analytics) of your selected analytics service for detailed information on each plan's properties. - -#### Select the instance - -Choose the instance type for the nodes of your service, you will be able to change it afterward. The number of nodes depends on the plan previously chosen. - -![Choose instance](images/kafka_features.v2.png){.thumbnail} - -#### Select the storage - -Storage could be scaled up to 3 time the base storage. - -![Choose storage](images/kafka_storage.v2.png){.thumbnail} - -#### Configure your options - -Choose the network options for your service and whitelist the IP addresses that will access the service. - -![Configure options](images/kafka_options.v2.png){.thumbnail} - -#### Review and confirm - -A summary of your order is display to help you review your service configuration. - -![Review order](images/kafka_configuration.1.v2.png){.thumbnail} - -The components of the price is also summarized with a monthly estimation. 
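Beyond the single consumer shown in this guide, several consumers can share the partitions of a topic by joining the same consumer group. Here is a short sketch reusing the `kafkacat.conf` file from the CLI section, with a hypothetical group name; running it in two terminals shows the partitions being balanced between the two instances.

```bash
# Balanced (group) consumer: members of "my-group" split the partitions of my-topic.
# Offsets are committed per group, so restarting resumes where the group left off.
kcat -F kafkacat.conf -G my-group my-topic
```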
- -![Review pricing](images/kafka_configuration.2.v2.png){.thumbnail} - -Click the `API and Terraform equivalent`{.action} button to open the following window: - -![API and Terraform equivalent](images/kafka_configuration.3.v2.png){.thumbnail} - -The informations displayed in this window could help you automate your service creation with the [OVHcloud API](/pages/manage_and_operate/api/first-steps) or the OVHcloud Terraform Provider. - -When you are ready click the `Order`{.action} button to create your service. -In a matter of minutes, your new Apache Kafka service will be deployed. -Messages in the OVHcloud Control Panel will inform you when the streaming tool is ready to use. - -### Configure the Apache Kafka service - -Once the Public Cloud Databases for Kafka service is up and running, you will have to define at least one user and one authorised IP (if not already provided during the order) in order to fully connect to the service (as producer or consumer). - -![Kafka Concept](images/kafka_concept.png){.thumbnail} - -The `Dashboard`{.action} tab automatically updates when your service is ready. - -![Kafka General information](images/kafka_cluster_ready_to_configure.v2.png){.thumbnail} - -#### Mandatory: Set up a user - -Switch to the `Users`{.action} tab. An admin user name `avnadmin` is preconfigured during the service installation. - -![Users](images/kafka_users.v2.png){.thumbnail} - -You can add more users by clicking the `Add user`{.action} button. - -![Add a user](images/kafka_add_user.v2.png){.thumbnail} - -Enter a username, then click `Create User`{.action}. - -Passwords need to be reset from the `Users`{.action} table. - -![Password reset](images/kafka_user_password_reset1.v2.png){.thumbnail} - -#### Mandatory: Configure authorised IPs - -> [!warning] -> For security reasons the default network configuration doesn't allow any incoming connections. It is thus critical to authorize the suitable IP addresses in order to successfully access your Kafka cluster. - -If you did not define the authorised IPs during the order you could do it in the `Configuration`{.action} tab. At least one IP address must be authorised here before you can connect to your database. - -![Authorised IP](images/kafka_authorized_ip.v2.png){.thumbnail} - -Add the IP address of your computer by using the `Current IP`{.action} button. -You will be able to remove IPs from the table afterward. - -![Add IP](images/kafka_add_ip.v2.png){.thumbnail} - -Your Apache Kafka service is now fully accessible! -Optionally, you can configure access control lists (ACL) for granular permissions and create something called topics, as shown below. - -#### Optional: Create Kafka topics - -Topics can be seen as categories, allowing you to organize your Kafka records. Producers write to topics, and consumers read from topics. - -To create Kafka topics, first go to the `Topics`{.action} tab then click on the `Add a topic`{.action} button: - -![Add a topic](images/kafka_topics.v2.png){.thumbnail} - -In advanced configuration you can change the default value for the following parameters: - -- Minimum in-sync replica (2 by default) -- Partitions (1 partition by default) -- Replication (3 brokers by default) -- Retention size in bytes (-1: no limitation by default) -- Retention time in hours (-1: no limitation by default) -- Deletion policy - -![Create a topic](images/kafka_create_topic.v2.png){.thumbnail} - -#### Optional: Configure ACLs on topics - -Kafka supports access control lists (ACLs) to manage permissions on topics. 
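Records that share a key always land in the same partition, which is what makes per-key ordering possible. The sketch below, again reusing the `kafkacat.conf` file from the CLI section, produces a few keyed records (each input line split into `key:value` by `-K :`) and then prints the key, partition and offset of every record with a `-f` format string; the topic and keys are made up for the example.

```bash
# Produce three keyed records; -K ':' tells kcat to split each line into key:value.
printf 'user-1:login\nuser-2:login\nuser-1:logout\n' | \
  kcat -F kafkacat.conf -P -t my-topic -K :

# Consume from the beginning and show where each record landed.
kcat -F kafkacat.conf -C -t my-topic -o beginning -e \
  -f 'key=%k partition=%p offset=%o value=%s\n'
```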
This approach allows you to limit the operations that are available to specific connections and to restrict access to certain data sets, which improves the security of your data. - -By default the admin user has access to all topics with admin privileges. You can define some additional ACLs for all users / topics, by clicking on the `Add an ACL`{.action} button from the `ACL`{.action} tab: - -![Enable ACLs](images/kafka_acl.v2.png){.thumbnail} - -For a particular user, and one topic (or all with '*'), define the ACL with the following permissions: - -- **admin**: full access to APIs and topic -- **read**: allow only searching and retrieving data from a topic -- **write**: allow updating, adding, and deleting data from a topic -- **readwrite**: full access to the topic - -![Define ACLs](images/kafka_add_entry1.v2.png){.thumbnail} - -*Note*: Write permission allows the service user to create new indexes that match the pattern, but it does not allow deletion of those indexes. - -When multiple rules match, they are applied in the order listed above. If no rules match, access is denied. - -### First CLI connection - -> [!warning] -> Verify that the IP address visible from your browser application is part of the "Authorised IPs" defined for this Kafka service. -> -> Check also that the user has granted ACLs for the target topics. - -#### Download server and user certificates - -In order to connect to the Apache Kafka service, it is required to use server and user certificates. - -##### Server certificate - -The server CA (*Certificate Authority*) certificate can be downloaded from the `Dashboard`{.action} tab: - -![Kafka server certificate](images/kafka_get_server_certificate.v2.png){.thumbnail} - -##### User certificate and access key - -The user certificate and the user access key can be downloaded from the `Users`{.action} tab: - -![User informations](images/kafka_user_certificate_and_access_key.v2.png){.thumbnail} - -#### Install an Apache Kafka CLI - -As part of the Apache Kafka official installation, you will get different scripts that will also allow you to connect to Kafka in a Java 8+ environment: [Apache Kafka Official Quickstart](https://kafka.apache.org/quickstart). - -We propose to use a generic and more lightweight (does not require a JVM) producer and consumer client instead: `Kcat` (formerly known as `kafkacat`). - -##### **Install Kcat** - -For this client installation, please follow the instructions available at: [Kafkacat Official Github](https://github.com/edenhill/kcat). - -##### **Kcat configuration file** - -Let's create a configuration file to simplify the CLI commands to act as Kafka Producer and Consumer: - -kafkacat.conf : - -```text -bootstrap.servers=kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186 -enable.ssl.certificate.verification=false -ssl.ca.location=/home/user/kafkacat/ca.pem -security.protocol=ssl -ssl.key.location=/home/user/kafkacat/service.key -ssl.certificate.location=/home/user/kafkacat/service.cert -``` - -In our example, the cluster address and port are **kafka-f411d2ae-f411d2ae.database.cloud.ovh.net:20186** and the previously downloaded CA certificates are in the **/home/user/kafkacat/** folder. - -Change theses values according to your own configuration. - -##### **Kafka producer** - -For this first example let's push the "test-message-key" and its "test-message-content" to the "my-topic" topic. 
- -```bash -echo test-message-content | kcat -F kafkacat.conf -P -t my-topic -k test-message-key -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -##### **Kafka consumer** - -The data can be retrieved from "my-topic". - -```bash -kcat -F kafkacat.conf -C -t my-topic -o -1 -e -``` - -*Note*: depending on the installed binary, the CLI command can be either **kcat** or **kafkacat**. - -## Conclusion - -Congratulations, you now have an up and running Apache Kafka cluster, fully managed and secured. You are able to push and retrieve data easily via CLI. +Explore practical guidance on how to configure your OVHcloud Public Cloud Kafka cluster to accept incoming connections covering access setup via the control panel or API and essential network settings at: [Configure Kafka cluster connections](/pages/public_cloud/data_analytics/analytics/kafka_incoming_connections) ## Go further -[Kafka capabilities](/pages/public_cloud/public_cloud_databases/kafka_01_capabilities) - [Kafka Official documentation](https://kafka.apache.org/documentation/) [Kafka clients](https://cwiki.apache.org/confluence/display/KAFKA/Clients) diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_acl.v2.png b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_acl.v2.png deleted file mode 100644 index 75c86e7e189..00000000000 Binary files a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/images/kafka_acl.v2.png and /dev/null differ diff --git a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/meta.yaml b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/meta.yaml index c2f786cd6f7..9eeddeac469 100755 --- a/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/meta.yaml +++ b/pages/public_cloud/public_cloud_databases/kafka_02_getting_started/meta.yaml @@ -2,4 +2,4 @@ id: f15354bd-dbc3-432f-a389-49f1c70da327 full_slug: public-cloud-databases-kafka-getting-started engine: kafka section: dashboard -reference_category: public-cloud-databases-kafka-guides \ No newline at end of file +reference_category: public-cloud-data-analytics-kafka-getting-started \ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_03_advanced_parameters_references/meta.yaml b/pages/public_cloud/public_cloud_databases/kafka_03_advanced_parameters_references/meta.yaml index a31c1f7413b..876500d5f09 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_03_advanced_parameters_references/meta.yaml +++ b/pages/public_cloud/public_cloud_databases/kafka_03_advanced_parameters_references/meta.yaml @@ -2,4 +2,4 @@ id: dfd74711-429f-4b93-84f8-674375756e83 full_slug: public-cloud-databases-kafka-advanced-parameters-references engine: kafka section: settings -reference_category: public-cloud-databases-kafka-guides \ No newline at end of file +reference_category: public-cloud-data-analytics-kafka-advanced-guides \ No newline at end of file diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.de-de.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.de-de.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.de-de.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.de-de.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public 
Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-asia.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-asia.md index ffa8e556429..0eeb0c6944d 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-asia.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-asia.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: "Kafka - Create publisher and consumer applications" +excerpt: "Develop your first Python applications using Kafka" +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-au.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-au.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-au.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-au.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ca.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ca.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ca.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ca.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-gb.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-gb.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-gb.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-gb.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ie.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ie.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ie.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-ie.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your 
first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-sg.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-sg.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-sg.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-sg.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-us.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-us.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-us.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.en-us.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-es.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-es.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-es.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-es.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-us.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-us.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-us.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.es-us.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-ca.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-ca.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-ca.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-ca.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 
101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-fr.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-fr.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-fr.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.fr-fr.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.it-it.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.it-it.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.it-it.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.it-it.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pl-pl.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pl-pl.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pl-pl.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pl-pl.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pt-pt.md b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pt-pt.md index ffa8e556429..0860ef7e730 100644 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pt-pt.md +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/guide.pt-pt.md @@ -1,7 +1,7 @@ --- -title: "Kafka - Python 101" -excerpt: "Code your first Python applications using Public Cloud Databases for Kafka" -updated: 2025-08-04 +title: Kafka - Create publisher and consumer applications +excerpt: Develop your first Python applications using Kafka +updated: 2025-08-25 --- ## Objective diff --git a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/meta.yaml b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/meta.yaml index 9d6db15c07e..3441a69f6bf 100755 --- a/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/meta.yaml +++ b/pages/public_cloud/public_cloud_databases/kafka_04_dev_python_basics/meta.yaml @@ -2,4 +2,4 @@ id: 
c25a4e2a-a600-4acb-807b-7035145e0a9d full_slug: public-cloud-databases-kafka-dev-python-basics engine: kafka section: dashboard -reference_category: public-cloud-databases-kafka-guides \ No newline at end of file +reference_category: public-cloud-data-analytics-kafka-tutorials \ No newline at end of file