
Commit 996c180

api-clients-generation-pipeline[bot] and ci.datadog-api-spec authored
Add support for Array Processor in Logs Pipelines (#751)
Co-authored-by: ci.datadog-api-spec <[email protected]>
1 parent ca1e221 commit 996c180

29 files changed: +1579 -4 lines changed

.apigentools-info

Lines changed: 4 additions & 4 deletions
@@ -4,13 +4,13 @@
     "spec_versions": {
         "v1": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2025-07-02 20:43:19.225643",
-            "spec_repo_commit": "e327e6df"
+            "regenerated": "2025-07-03 15:49:25.633380",
+            "spec_repo_commit": "d781944f"
         },
         "v2": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2025-07-02 20:43:19.242229",
-            "spec_repo_commit": "e327e6df"
+            "regenerated": "2025-07-03 15:49:25.651023",
+            "spec_repo_commit": "d781944f"
         }
     }
 }

.generator/schemas/v1/openapi.yaml

Lines changed: 138 additions & 0 deletions
@@ -5163,6 +5163,143 @@ components:
       type: string
       x-enum-varnames:
       - ARITHMETIC_PROCESSOR
+    LogsArrayProcessor:
+      description: 'A processor for extracting, aggregating, or transforming values
+        from JSON arrays within your logs.
+
+        Supported operations are:
+
+        - Select value from matching element
+
+        - Compute array length
+
+        - Append a value to an array'
+      properties:
+        is_enabled:
+          default: false
+          description: Whether or not the processor is enabled.
+          type: boolean
+        name:
+          description: Name of the processor.
+          type: string
+        operation:
+          $ref: '#/components/schemas/LogsArrayProcessorOperation'
+        type:
+          $ref: '#/components/schemas/LogsArrayProcessorType'
+      required:
+      - operation
+      - type
+      type: object
+    LogsArrayProcessorOperation:
+      description: Configuration of the array processor operation to perform.
+      oneOf:
+      - $ref: '#/components/schemas/LogsArrayProcessorOperationAppend'
+      - $ref: '#/components/schemas/LogsArrayProcessorOperationLength'
+      - $ref: '#/components/schemas/LogsArrayProcessorOperationSelect'
+    LogsArrayProcessorOperationAppend:
+      description: Operation that appends a value to a target array attribute.
+      properties:
+        preserve_source:
+          default: true
+          description: Remove or preserve the remapped source element.
+          type: boolean
+        source:
+          description: Attribute path containing the value to append.
+          example: network.client.ip
+          type: string
+        target:
+          description: Attribute path of the array to append to.
+          example: sourceIps
+          type: string
+        type:
+          $ref: '#/components/schemas/LogsArrayProcessorOperationAppendType'
+      required:
+      - type
+      - source
+      - target
+      type: object
+    LogsArrayProcessorOperationAppendType:
+      description: Operation type.
+      enum:
+      - append
+      example: append
+      type: string
+      x-enum-varnames:
+      - APPEND
+    LogsArrayProcessorOperationLength:
+      description: Operation that computes the length of a `source` array and stores
+        the result in the `target` attribute.
+      properties:
+        source:
+          description: Attribute path of the array to measure.
+          example: tags
+          type: string
+        target:
+          description: Attribute that receives the computed length.
+          example: tagCount
+          type: string
+        type:
+          $ref: '#/components/schemas/LogsArrayProcessorOperationLengthType'
+      required:
+      - type
+      - source
+      - target
+      type: object
+    LogsArrayProcessorOperationLengthType:
+      description: Operation type.
+      enum:
+      - length
+      example: length
+      type: string
+      x-enum-varnames:
+      - LENGTH
+    LogsArrayProcessorOperationSelect:
+      description: Operation that finds an object in a `source` array using a `filter`,
+        and then extracts a specific value into the `target` attribute.
+      properties:
+        filter:
+          description: Filter condition expressed as `key:value` used to find the
+            matching element.
+          example: name:Referrer
+          type: string
+        source:
+          description: Attribute path of the array to search into.
+          example: httpRequest.headers
+          type: string
+        target:
+          description: Attribute that receives the extracted value.
+          example: referrer
+          type: string
+        type:
+          $ref: '#/components/schemas/LogsArrayProcessorOperationSelectType'
+        value_to_extract:
+          description: Key of the value to extract from the matching element.
+          example: value
+          type: string
+      required:
+      - type
+      - source
+      - target
+      - filter
+      - value_to_extract
+      type: object
+    LogsArrayProcessorOperationSelectType:
+      description: Operation type.
+      enum:
+      - select
+      example: select
+      type: string
+      x-enum-varnames:
+      - SELECT
+    LogsArrayProcessorType:
+      default: array-processor
+      description: Type of logs array processor.
+      enum:
+      - array-processor
+      example: array-processor
+      type: string
+      x-enum-varnames:
+      - ARRAY_PROCESSOR
     LogsAttributeRemapper:
       description: 'The remapper processor remaps any source attribute(s) or tag to
         another target attribute or tag.
@@ -6060,6 +6197,7 @@ components:
       - $ref: '#/components/schemas/ReferenceTableLogsLookupProcessor'
      - $ref: '#/components/schemas/LogsTraceRemapper'
      - $ref: '#/components/schemas/LogsSpanRemapper'
+     - $ref: '#/components/schemas/LogsArrayProcessor'
    LogsQueryCompute:
      description: Define computation for a log query.
      properties:
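
Taken together, the schemas above define how an array processor appears in a pipeline definition. As a rough illustration only (assembled from the example values in the schemas, not taken from this commit), a select-operation entry in a pipeline's processors array would look approximately like:

{
  "type": "array-processor",
  "is_enabled": true,
  "name": "extract_referrer",
  "operation": {
    "type": "select",
    "source": "httpRequest.headers",
    "filter": "name:Referrer",
    "target": "referrer",
    "value_to_extract": "value"
  }
}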
Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
// Create a pipeline with Array Processor Append Operation returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsArrayProcessor;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperation;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationAppend;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationAppendType;
use datadog_api_client::datadogV1::model::LogsArrayProcessorType;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayAppend".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
                    LogsArrayProcessorOperationAppend::new(
                        "network.client.ip".to_string(),
                        "sourceIps".to_string(),
                        LogsArrayProcessorOperationAppendType::APPEND,
                    ),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("append_ip_to_array".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
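
For reference, assuming the client serializes the builder fields under the property names defined in the schema above, this example should send a request body roughly equivalent to:

{
  "name": "testPipelineArrayAppend",
  "filter": { "query": "source:python" },
  "processors": [
    {
      "type": "array-processor",
      "is_enabled": true,
      "name": "append_ip_to_array",
      "operation": {
        "type": "append",
        "source": "network.client.ip",
        "target": "sourceIps"
      }
    }
  ],
  "tags": []
}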
Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
// Create a pipeline with Array Processor Select Operation returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsArrayProcessor;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperation;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationSelect;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationSelectType;
use datadog_api_client::datadogV1::model::LogsArrayProcessorType;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArraySelect".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationSelect(Box::new(
                    LogsArrayProcessorOperationSelect::new(
                        "name:Referrer".to_string(),
                        "httpRequest.headers".to_string(),
                        "referrer".to_string(),
                        LogsArrayProcessorOperationSelectType::SELECT,
                        "value".to_string(),
                    ),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("extract_referrer".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
// Create a pipeline with Array Processor Length Operation returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsArrayProcessor;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperation;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationLength;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationLengthType;
use datadog_api_client::datadogV1::model::LogsArrayProcessorType;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayLength".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationLength(Box::new(
                    LogsArrayProcessorOperationLength::new(
                        "tags".to_string(),
                        "tagCount".to_string(),
                        LogsArrayProcessorOperationLengthType::LENGTH,
                    ),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("count_tags".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
// Create a pipeline with Array Processor Append Operation with preserve_source
// true returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsArrayProcessor;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperation;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationAppend;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationAppendType;
use datadog_api_client::datadogV1::model::LogsArrayProcessorType;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayAppendPreserve".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
                    LogsArrayProcessorOperationAppend::new(
                        "network.client.ip".to_string(),
                        "sourceIps".to_string(),
                        LogsArrayProcessorOperationAppendType::APPEND,
                    )
                    .preserve_source(true),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("append_ip_and_keep_source".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
// Create a pipeline with Array Processor Append Operation with preserve_source
// false returns "OK" response
use datadog_api_client::datadog;
use datadog_api_client::datadogV1::api_logs_pipelines::LogsPipelinesAPI;
use datadog_api_client::datadogV1::model::LogsArrayProcessor;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperation;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationAppend;
use datadog_api_client::datadogV1::model::LogsArrayProcessorOperationAppendType;
use datadog_api_client::datadogV1::model::LogsArrayProcessorType;
use datadog_api_client::datadogV1::model::LogsFilter;
use datadog_api_client::datadogV1::model::LogsPipeline;
use datadog_api_client::datadogV1::model::LogsProcessor;

#[tokio::main]
async fn main() {
    let body = LogsPipeline::new("testPipelineArrayAppendNoPreserve".to_string())
        .filter(LogsFilter::new().query("source:python".to_string()))
        .processors(vec![LogsProcessor::LogsArrayProcessor(Box::new(
            LogsArrayProcessor::new(
                LogsArrayProcessorOperation::LogsArrayProcessorOperationAppend(Box::new(
                    LogsArrayProcessorOperationAppend::new(
                        "network.client.ip".to_string(),
                        "sourceIps".to_string(),
                        LogsArrayProcessorOperationAppendType::APPEND,
                    )
                    .preserve_source(false),
                )),
                LogsArrayProcessorType::ARRAY_PROCESSOR,
            )
            .is_enabled(true)
            .name("append_ip_and_remove_source".to_string()),
        ))])
        .tags(vec![]);
    let configuration = datadog::Configuration::new();
    let api = LogsPipelinesAPI::with_config(configuration);
    let resp = api.create_logs_pipeline(body).await;
    if let Ok(value) = resp {
        println!("{:#?}", value);
    } else {
        println!("{:#?}", resp.unwrap_err());
    }
}
