From 0d6dd0fe17c34b3dce4da64bcd02b84e8851f686 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 18:09:25 +0200 Subject: [PATCH 01/11] Update output-logstash.asciidoc --- .../outputs/output-logstash.asciidoc | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc index aec36d059..3ef0bd693 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc @@ -33,7 +33,10 @@ The {ls} configuration pipeline listens for incoming {agent} connections, processes received events, and then sends the events to {es}. The following example configures a {ls} pipeline that listens on port `5044` for -incoming {agent} connections and routes received events to {es}: +incoming {agent} connections and routes received events to {es}. + +The {ls} pipeline definition below is an example. Please refer to the `Additional Logstash +configuration required` steps when creating the {ls} output in the Fleet outputs page. [source,yaml] ---- @@ -41,19 +44,28 @@ input { elastic_agent { port => 5044 enrich => none # don't modify the events' schema at all - # or minimal change, add only ssl and source metadata - # enrich => [ssl_peer_metadata, source_metadata] + ssl => true + ssl_certificate_authorities => [""] + ssl_certificate => "" + ssl_key => "" + ssl_verify_mode => "force_peer" } } output { elasticsearch { hosts => ["http://localhost:9200"] <1> + # cloud_id => "..." data_stream => "true" + api_key => "" <2> + data_stream => true + ssl => true + # cacert => "" } } ---- <1> The {es} server and the port (`9200`) where {es} is running. +<2> The API Key obtained from the {ls} output creation steps in Fleet. For more information about configuring {ls}, refer to {logstash-ref}/configuration.html[Configuring {ls}] and From 38835fea0271177dcdc863899b3768093a393c3b Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 18:36:35 +0200 Subject: [PATCH 02/11] Update output-kafka.asciidoc --- .../outputs/output-kafka.asciidoc | 25 ++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc index 839a2c043..b422dcab6 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc @@ -44,6 +44,29 @@ outputs: verification_mode: full ---- +== Kafka output and using Logstash to index data to Elasticsearch + +If you are considering using {ls} to ship the data from `kafka` to {es}, please +be aware Elastic is not currently testing this kind of setup. + +The structure of the documents sent from {agent} to `kafka` must not be modified by {ls}. +We suggest disabling `ecs_compatibility` on both the `kafka` input and the `json` codec. + +Refer to {ls} output for {agent} for more details. + +[source,yaml] +---- +inputs { + kafka { + ... + ecs_compatibility => "disabled" + codec => json { ecs_compatibility => "disabled" } + ... + } +} +... +---- + == Kafka output configuration settings The `kafka` output supports the following settings, grouped by category. 
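The `kafka` snippet in the hunk above is abbreviated. As a reference only, a fuller {ls} pipeline along the same lines (using the singular `input` section that {ls} pipeline definitions expect) might look like the sketch below; the broker addresses, topic name, consumer group, and API key are placeholder assumptions, not values taken from these patches.

[source,conf]
----
input {
  kafka {
    # Placeholder broker list, topic, and consumer group -- match the Elastic Agent Kafka output settings
    bootstrap_servers => "kafka-1:9092,kafka-2:9092"
    topics => ["elastic-agent-events"]
    group_id => "logstash"
    # Keep the document structure produced by Elastic Agent intact
    ecs_compatibility => "disabled"
    codec => json { ecs_compatibility => "disabled" }
  }
}

output {
  elasticsearch {
    # Placeholder Elasticsearch endpoint and API key (id:api_key format)
    hosts => ["https://localhost:9200"]
    api_key => "<id>:<api_key>"
    # Route each event to the data stream named in its data_stream.* fields
    data_stream => "true"
  }
}
----

With `data_stream => "true"` the {es} output routes each event to the data stream named by its `data_stream.*` fields, which is why the documents must reach {ls} with their structure unmodified.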
@@ -502,4 +525,4 @@ Note: If set to 0, no ACKs are returned by Kafka. Messages might be lost silentl // ============================================================================= -|=== \ No newline at end of file +|=== From 8e003b82b7f7976a11ec3e58c0bf1288c272826a Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 18:38:30 +0200 Subject: [PATCH 03/11] Update fleet-settings-output-kafka.asciidoc --- .../fleet-settings-output-kafka.asciidoc | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc b/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc index d7f7b1a29..1d91c260e 100644 --- a/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc +++ b/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc @@ -5,6 +5,29 @@ Specify these settings to send data over a secure connection to Kafka. In the {fleet} <>, make sure that the Kafka output type is selected. +== Kafka output and using Logstash to index data to Elasticsearch + +If you are considering using {ls} to ship the data from `kafka` to {es}, please +be aware Elastic is not currently testing this kind of setup. + +The structure of the documents sent from {agent} to `kafka` must not be modified by {ls}. +We suggest disabling `ecs_compatibility` on both the `kafka` input and the `json` codec. + +Refer to {ls} output for {agent} for more details. + +[source,yaml] +---- +inputs { + kafka { + ... + ecs_compatibility => "disabled" + codec => json { ecs_compatibility => "disabled" } + ... + } +} +... +---- + [discrete] == General settings From 18ddfb039f73d67c3abaaea16df87a97330573fe Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 18:42:56 +0200 Subject: [PATCH 04/11] Update fleet-settings-output-logstash.asciidoc --- .../fleet-settings-output-logstash.asciidoc | 40 ++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/docs/en/ingest-management/fleet/fleet-settings-output-logstash.asciidoc b/docs/en/ingest-management/fleet/fleet-settings-output-logstash.asciidoc index b96778f78..66fdea424 100644 --- a/docs/en/ingest-management/fleet/fleet-settings-output-logstash.asciidoc +++ b/docs/en/ingest-management/fleet/fleet-settings-output-logstash.asciidoc @@ -13,6 +13,44 @@ Before using the {ls} output, you need to make sure that for any integrations th To learn how to generate certificates, refer to <>. +To receive the events in {ls}, you also need to create a {ls} configuration pipeline. +The {ls} configuration pipeline listens for incoming {agent} connections, +processes received events, and then sends the events to {es}. + +The following example configures a {ls} pipeline that listens on port `5044` for +incoming {agent} connections and routes received events to {es}. + +The {ls} pipeline definition below is an example. Please refer to the `Additional Logstash +configuration required` steps when creating the {ls} output in the Fleet outputs page. + +[source,yaml] +---- +input { + elastic_agent { + port => 5044 + enrich => none # don't modify the events' schema at all + ssl => true + ssl_certificate_authorities => [""] + ssl_certificate => "" + ssl_key => "" + ssl_verify_mode => "force_peer" + } +} +output { + elasticsearch { + hosts => ["http://localhost:9200"] <1> + # cloud_id => "..." 
+ data_stream => "true" + api_key => "" <2> + data_stream => true + ssl => true + # cacert => "" + } +} +---- +<1> The {es} server and the port (`9200`) where {es} is running. +<2> The API Key obtained from the {ls} output creation steps in Fleet. + [cols="2* Date: Tue, 10 Sep 2024 18:45:29 +0200 Subject: [PATCH 05/11] Update output-logstash.asciidoc --- .../configuration/outputs/output-logstash.asciidoc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc index 3ef0bd693..327e66535 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc @@ -35,8 +35,7 @@ processes received events, and then sends the events to {es}. The following example configures a {ls} pipeline that listens on port `5044` for incoming {agent} connections and routes received events to {es}. -The {ls} pipeline definition below is an example. Please refer to the `Additional Logstash -configuration required` steps when creating the {ls} output in the Fleet outputs page. +The {ls} pipeline definition below is an example. [source,yaml] ---- @@ -65,7 +64,7 @@ output { } ---- <1> The {es} server and the port (`9200`) where {es} is running. -<2> The API Key obtained from the {ls} output creation steps in Fleet. +<2> The API Key used by {ls} to ship data to the destination data streams. For more information about configuring {ls}, refer to {logstash-ref}/configuration.html[Configuring {ls}] and From 9d7f712863cbc4bdc728064e4efe6b1514a57221 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 23:03:37 +0200 Subject: [PATCH 06/11] Update docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> --- .../fleet/fleet-settings-output-kafka.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc b/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc index 1d91c260e..1b90b76e0 100644 --- a/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc +++ b/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc @@ -5,7 +5,7 @@ Specify these settings to send data over a secure connection to Kafka. In the {fleet} <>, make sure that the Kafka output type is selected. -== Kafka output and using Logstash to index data to Elasticsearch +== Kafka output and using {ls} to index data to {es} If you are considering using {ls} to ship the data from `kafka` to {es}, please be aware Elastic is not currently testing this kind of setup. 
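The SSL options in the `elastic_agent` input example added in the {fleet} {ls} output settings patch above are shown with empty placeholders. As a reference only, a filled-in sketch under assumed file paths could look like the following; replace the paths with the CA, certificate, and key generated for {ls} as described in the certificate guide referenced on that page.

[source,conf]
----
input {
  elastic_agent {
    port => 5044
    enrich => none    # leave the Elastic Agent event schema untouched
    ssl => true
    # Placeholder paths -- point these at the CA, server certificate, and key created for Logstash
    ssl_certificate_authorities => ["/etc/logstash/certs/ca.crt"]
    ssl_certificate => "/etc/logstash/certs/logstash.crt"
    # The elastic_agent (beats) input expects the private key in PKCS#8 format
    ssl_key => "/etc/logstash/certs/logstash.pkcs8.key"
    ssl_verify_mode => "force_peer"    # require the agent to present a client certificate
  }
}
----

Setting `ssl_verify_mode => "force_peer"` makes {ls} reject any {agent} connection that does not present a client certificate signed by the configured CA, matching the mutual TLS setup these pages describe.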
From 2c55ed8cb5b65c7522257ded412b91c16bab8b25 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 23:03:49 +0200 Subject: [PATCH 07/11] Update docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> --- .../elastic-agent/configuration/outputs/output-kafka.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc index b422dcab6..ca5936b32 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc @@ -44,7 +44,7 @@ outputs: verification_mode: full ---- -== Kafka output and using Logstash to index data to Elasticsearch +== Kafka output and using {ls} to index data to {es} If you are considering using {ls} to ship the data from `kafka` to {es}, please be aware Elastic is not currently testing this kind of setup. From 9bd767f75c934c42d364c0af4a110868efd91c15 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 23:03:59 +0200 Subject: [PATCH 08/11] Update docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> --- .../elastic-agent/configuration/outputs/output-logstash.asciidoc | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc index 327e66535..736d51f9c 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc @@ -35,7 +35,6 @@ processes received events, and then sends the events to {es}. The following example configures a {ls} pipeline that listens on port `5044` for incoming {agent} connections and routes received events to {es}. -The {ls} pipeline definition below is an example. [source,yaml] ---- From 193a5e7cd4c6bd30d211c6fd2d7ad7b2b11a9290 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 23:04:06 +0200 Subject: [PATCH 09/11] Update docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> --- .../configuration/outputs/output-logstash.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc index 736d51f9c..71dc07094 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-logstash.asciidoc @@ -32,7 +32,7 @@ To receive the events in {ls}, you also need to create a {ls} configuration pipe The {ls} configuration pipeline listens for incoming {agent} connections, processes received events, and then sends the events to {es}. 
-The following example configures a {ls} pipeline that listens on port `5044` for +The following {ls} pipeline definition example configures a pipeline that listens on port `5044` for incoming {agent} connections and routes received events to {es}. From 3c77a5e81fd272f0fbbfe7bfb0a35cb274fe1698 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 23:04:23 +0200 Subject: [PATCH 10/11] Update docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> --- .../elastic-agent/configuration/outputs/output-kafka.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc index ca5936b32..0845fec2c 100644 --- a/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc +++ b/docs/en/ingest-management/elastic-agent/configuration/outputs/output-kafka.asciidoc @@ -52,7 +52,7 @@ be aware Elastic is not currently testing this kind of setup. The structure of the documents sent from {agent} to `kafka` must not be modified by {ls}. We suggest disabling `ecs_compatibility` on both the `kafka` input and the `json` codec. -Refer to {ls} output for {agent} for more details. +Refer to <> documentation for more details. [source,yaml] ---- From d67311f248d3f5bc89a2610e89119843ba73d7d1 Mon Sep 17 00:00:00 2001 From: Luca Belluccini Date: Tue, 10 Sep 2024 23:04:28 +0200 Subject: [PATCH 11/11] Update docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc Co-authored-by: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> --- .../fleet/fleet-settings-output-kafka.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc b/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc index 1b90b76e0..ea6f46fc4 100644 --- a/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc +++ b/docs/en/ingest-management/fleet/fleet-settings-output-kafka.asciidoc @@ -13,7 +13,7 @@ be aware Elastic is not currently testing this kind of setup. The structure of the documents sent from {agent} to `kafka` must not be modified by {ls}. We suggest disabling `ecs_compatibility` on both the `kafka` input and the `json` codec. -Refer to {ls} output for {agent} for more details. +Refer to the <> documentation for more details. [source,yaml] ----
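When trying out a Kafka-to-{ls} pipeline such as the ones sketched in these patches, one optional way to confirm that the {agent} document structure arrives unmodified is to print a few events before enabling the {es} output. The snippet below is only a debugging aid and is not part of the documented configuration.

[source,conf]
----
output {
  # Temporary output for verification only -- remove once the event structure is confirmed.
  # Check that fields such as data_stream.*, @timestamp, and event.* arrive exactly as Elastic Agent produced them.
  stdout { codec => rubydebug { metadata => true } }
}
----

Once the printed fields look identical to what {agent} produced, remove the temporary `stdout` output and enable the {es} output.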