Commit 31b5d32

ci.datadog-api-spec committed
Regenerate client from commit e5eab154 of spec repo
1 parent 56ed157 commit 31b5d32

9 files changed: +85 -90 lines changed


.apigentools-info

Lines changed: 4 additions & 4 deletions
@@ -4,13 +4,13 @@
     "spec_versions": {
         "v1": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2024-11-25 19:59:32.732993",
-            "spec_repo_commit": "3c840607"
+            "regenerated": "2024-11-26 23:35:31.593194",
+            "spec_repo_commit": "e5eab154"
         },
         "v2": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2024-11-25 19:59:32.752304",
-            "spec_repo_commit": "3c840607"
+            "regenerated": "2024-11-26 23:35:31.619279",
+            "spec_repo_commit": "e5eab154"
         }
     }
 }

.generator/schemas/v2/openapi.yaml

Lines changed: 1 addition & 0 deletions
@@ -31492,6 +31492,7 @@ paths:
           $ref: '#/components/responses/TooManyRequestsResponse'
       security:
       - apiKeyAuth: []
+        appKeyAuth: []
       summary: Post an event
       tags:
       - Events
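
Note that the new appKeyAuth entry is appended to the same security requirement object as apiKeyAuth rather than added as a separate list item, so the spec now requires both an API key and an application key for the Post an event operation. A minimal illustrative sketch of that OpenAPI distinction (not part of the diff itself):

    # Both schemes required together (the form used in this commit):
    security:
    - apiKeyAuth: []
      appKeyAuth: []

    # Either scheme accepted on its own (a different form, not used here):
    security:
    - apiKeyAuth: []
    - appKeyAuth: []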

features/v1/logs_indexes.feature

Lines changed: 10 additions & 10 deletions
@@ -10,70 +10,70 @@ Feature: Logs Indexes
     And a valid "appKeyAuth" key in the system
     And an instance of "LogsIndexes" API
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Create an index returns "Invalid Parameter Error" response
     Given new "CreateLogsIndex" request
     And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15}
     When the request is sent
     Then the response status is 400 Invalid Parameter Error
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Create an index returns "OK" response
     Given new "CreateLogsIndex" request
     And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "name": "main", "num_flex_logs_retention_days": 360, "num_retention_days": 15}
     When the request is sent
     Then the response status is 200 OK
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Get all indexes returns "OK" response
     Given new "ListLogIndexes" request
     When the request is sent
     Then the response status is 200 OK
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Get an index returns "Not Found" response
     Given new "GetLogsIndex" request
     And request contains "name" parameter from "REPLACE.ME"
     When the request is sent
     Then the response status is 404 Not Found
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Get an index returns "OK" response
     Given new "GetLogsIndex" request
     And request contains "name" parameter from "REPLACE.ME"
     When the request is sent
     Then the response status is 200 OK
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Get indexes order returns "OK" response
     Given new "GetLogsIndexOrder" request
     When the request is sent
     Then the response status is 200 OK
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Update an index returns "Invalid Parameter Error" response
     Given new "UpdateLogsIndex" request
     And request contains "name" parameter from "REPLACE.ME"
     And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "disable_daily_limit": false, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "num_flex_logs_retention_days": 360, "num_retention_days": 15}
     When the request is sent
     Then the response status is 400 Invalid Parameter Error
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Update an index returns "OK" response
     Given new "UpdateLogsIndex" request
     And request contains "name" parameter from "REPLACE.ME"
     And body with value {"daily_limit": 300000000, "daily_limit_reset": {"reset_time": "14:00", "reset_utc_offset": "+02:00"}, "daily_limit_warning_threshold_percentage": 70, "disable_daily_limit": false, "exclusion_filters": [{"filter": {"query": "*", "sample_rate": 1.0}, "name": "payment"}], "filter": {"query": "source:python"}, "num_flex_logs_retention_days": 360, "num_retention_days": 15}
     When the request is sent
     Then the response status is 200 OK
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Update indexes order returns "Bad Request" response
     Given new "UpdateLogsIndexOrder" request
     And body with value {"index_names": ["main", "payments", "web"]}
     When the request is sent
     Then the response status is 400 Bad Request
 
-  @generated @skip @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/logs-backend @team:DataDog/logs-core
   Scenario: Update indexes order returns "OK" response
     Given new "UpdateLogsIndexOrder" request
     And body with value {"index_names": ["main", "payments", "web"]}

features/v2/events.feature

Lines changed: 8 additions & 14 deletions
@@ -7,26 +7,24 @@ Feature: Events
 
   Background:
     Given a valid "apiKeyAuth" key in the system
+    And a valid "appKeyAuth" key in the system
     And an instance of "Events" API
 
   @generated @skip @team:DataDog/event-management
   Scenario: Get a list of events returns "Bad Request" response
-    Given a valid "appKeyAuth" key in the system
-    And new "ListEvents" request
+    Given new "ListEvents" request
     When the request is sent
     Then the response status is 400 Bad Request
 
   @skip-validation @team:DataDog/event-management
   Scenario: Get a list of events returns "OK" response
-    Given a valid "appKeyAuth" key in the system
-    And new "ListEvents" request
+    Given new "ListEvents" request
     When the request is sent
     Then the response status is 200 OK
 
   @replay-only @skip-validation @team:DataDog/event-management @with-pagination
   Scenario: Get a list of events returns "OK" response with pagination
-    Given a valid "appKeyAuth" key in the system
-    And new "ListEvents" request
+    Given new "ListEvents" request
     And request contains "filter[from]" parameter with value "now-15m"
     And request contains "filter[to]" parameter with value "now"
     And request contains "page[limit]" parameter with value 2
@@ -36,8 +34,7 @@ Feature: Events
 
   @team:DataDog/event-management
   Scenario: Get a quick list of events returns "OK" response
-    Given a valid "appKeyAuth" key in the system
-    And new "ListEvents" request
+    Given new "ListEvents" request
     And request contains "filter[query]" parameter with value "datadog-agent"
     And request contains "filter[from]" parameter with value "2020-09-17T11:48:36+01:00"
     And request contains "filter[to]" parameter with value "2020-09-17T12:48:36+01:00"
@@ -62,25 +59,22 @@ Feature: Events
 
   @team:DataDog/event-management
   Scenario: Search events returns "Bad Request" response
-    Given a valid "appKeyAuth" key in the system
-    And new "SearchEvents" request
+    Given new "SearchEvents" request
    And body with value {"filter": {"from": "now-15m", "query": "service:web* AND @http.status_code:[200 TO 299]", "to": "now"}, "options": {"timezone": "GMT"}, "page": {"cursor": "eyJzdGFydEF0IjoiQVFBQUFYS2tMS3pPbm40NGV3QUFBQUJCV0V0clRFdDZVbG8zY3pCRmNsbHJiVmxDWlEifQ==", "limit": 25}, "sort": "timestamp"}
     When the request is sent
     Then the response status is 400 Bad Request
 
   @team:DataDog/event-management
   Scenario: Search events returns "OK" response
-    Given a valid "appKeyAuth" key in the system
-    And new "SearchEvents" request
+    Given new "SearchEvents" request
     And body with value {"filter": {"query": "datadog-agent", "from": "2020-09-17T11:48:36+01:00", "to": "2020-09-17T12:48:36+01:00"}, "sort": "timestamp", "page": {"limit": 5}}
     When the request is sent
     Then the response status is 200 OK
     And the response "data" has length 0
 
   @replay-only @skip-validation @team:DataDog/event-management @with-pagination
   Scenario: Search events returns "OK" response with pagination
-    Given a valid "appKeyAuth" key in the system
-    And new "SearchEvents" request
+    Given new "SearchEvents" request
     And body with value {"filter": {"from": "now-15m", "to": "now"}, "options": {"timezone": "GMT"}, "page": {"limit": 2}, "sort": "timestamp"}
     When the request with pagination is sent
     Then the response status is 200 OK

features/v2/logs.feature

Lines changed: 6 additions & 6 deletions
@@ -105,44 +105,44 @@ Feature: Logs
     Then the response status is 200 OK
     And the response has 3 items
 
-  @integration-only @skip-terraform-config @skip-validation @team:DataDog/event-platform-intake @team:DataDog/logs-backend
+  @integration-only @skip-terraform-config @skip-validation @team:DataDog/event-platform-intake @team:DataDog/logs-backend @team:DataDog/logs-ingestion
   Scenario: Send deflate logs returns "Request accepted for processing (always 202 empty JSON)." response
     Given new "SubmitLog" request
     And body with value [{"ddsource": "nginx", "ddtags": "env:staging,version:5.1", "hostname": "i-012345678", "message": "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World", "service": "payment"}]
     And request contains "Content-Encoding" parameter with value "deflate"
     When the request is sent
     Then the response status is 202 Response from server (always 202 empty JSON).
 
-  @integration-only @skip-terraform-config @skip-validation @team:DataDog/event-platform-intake @team:DataDog/logs-backend
+  @integration-only @skip-terraform-config @skip-validation @team:DataDog/event-platform-intake @team:DataDog/logs-backend @team:DataDog/logs-ingestion
   Scenario: Send gzip logs returns "Request accepted for processing (always 202 empty JSON)." response
     Given new "SubmitLog" request
     And body with value [{"ddsource": "nginx", "ddtags": "env:staging,version:5.1", "hostname": "i-012345678", "message": "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World", "service": "payment"}]
     And request contains "Content-Encoding" parameter with value "gzip"
     When the request is sent
     Then the response status is 202 Request accepted for processing (always 202 empty JSON).
 
-  @generated @skip @team:DataDog/event-platform-intake @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/event-platform-intake @team:DataDog/logs-backend @team:DataDog/logs-ingestion
   Scenario: Send logs returns "Bad Request" response
     Given new "SubmitLog" request
     And body with value [{"ddsource": "nginx", "ddtags": "env:staging,version:5.1", "hostname": "i-012345678", "message": "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World", "service": "payment"}]
     When the request is sent
     Then the response status is 400 Bad Request
 
-  @generated @skip @team:DataDog/event-platform-intake @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/event-platform-intake @team:DataDog/logs-backend @team:DataDog/logs-ingestion
   Scenario: Send logs returns "Payload Too Large" response
     Given new "SubmitLog" request
     And body with value [{"ddsource": "nginx", "ddtags": "env:staging,version:5.1", "hostname": "i-012345678", "message": "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World", "service": "payment"}]
     When the request is sent
     Then the response status is 413 Payload Too Large
 
-  @generated @skip @team:DataDog/event-platform-intake @team:DataDog/logs-backend
+  @generated @skip @team:DataDog/event-platform-intake @team:DataDog/logs-backend @team:DataDog/logs-ingestion
   Scenario: Send logs returns "Request Timeout" response
     Given new "SubmitLog" request
     And body with value [{"ddsource": "nginx", "ddtags": "env:staging,version:5.1", "hostname": "i-012345678", "message": "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World", "service": "payment"}]
     When the request is sent
     Then the response status is 408 Request Timeout
 
-  @team:DataDog/event-platform-intake @team:DataDog/logs-backend
+  @team:DataDog/event-platform-intake @team:DataDog/logs-backend @team:DataDog/logs-ingestion
   Scenario: Send logs returns "Request accepted for processing (always 202 empty JSON)." response
     Given new "SubmitLog" request
     And body with value [{"ddsource": "nginx", "ddtags": "env:staging,version:5.1", "hostname": "i-012345678", "message": "2019-11-19T14:37:58,995 INFO [process.name][20081] Hello World", "service": "payment", "status": "info"}]
