diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b74841f..00712ceccf56 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6684,6 +6684,8 @@ components: description: Optional prefix for blobs written to the container. example: logs/ type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' container_name: description: The name of the Azure Blob Storage container to store logs in. @@ -15878,6 +15880,78 @@ components: type: string nullable: true type: array + DORADeploymentFetchResponse: + description: Response for fetching a single deployment event. + properties: + data: + $ref: '#/components/schemas/DORADeploymentObject' + type: object + DORADeploymentObject: + description: A DORA deployment event. + example: + attributes: + custom_tags: + - language:java + - department:engineering + - region:us-east-1 + env: production + finished_at: 1693491984000000000 + git: + commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 + repository_url: https://github.com/organization/example-repository + service: shopist + started_at: 1693491974000000000 + team: backend + version: v1.12.07 + id: 4242fcdd31586083 + type: dora_deployment + properties: + attributes: + $ref: '#/components/schemas/DORADeploymentObjectAttributes' + id: + description: The ID of the deployment event. + type: string + type: + $ref: '#/components/schemas/DORADeploymentType' + type: object + DORADeploymentObjectAttributes: + description: The attributes of the deployment event. + properties: + custom_tags: + $ref: '#/components/schemas/DORACustomTags' + env: + description: Environment name to where the service was deployed. + example: production + type: string + finished_at: + description: Unix timestamp when the deployment finished. + example: 1693491984000000000 + format: int64 + type: integer + git: + $ref: '#/components/schemas/DORAGitInfo' + service: + description: Service name. 
+ example: shopist + type: string + started_at: + description: Unix timestamp when the deployment started. + example: 1693491974000000000 + format: int64 + type: integer + team: + description: Name of the team owning the deployed service. + example: backend + type: string + version: + description: Version to correlate with APM Deployment Tracking. + example: v1.12.07 + type: string + required: + - service + - started_at + - finished_at + type: object DORADeploymentRequest: description: Request to create a DORA deployment event. properties: @@ -15968,18 +16042,53 @@ components: type: string x-enum-varnames: - DORA_DEPLOYMENT - DORAEvent: - description: A DORA event. + DORADeploymentsListResponse: + description: Response for the list deployments endpoint. + example: + data: + - attributes: + custom_tags: + - language:java + - department:engineering + - region:us-east-1 + env: production + finished_at: 1693491984000000000 + git: + commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 + repository_url: https://github.com/organization/example-repository + service: shopist + started_at: 1693491974000000000 + team: backend + version: v1.12.07 + id: 4242fcdd31586083 + type: dora_deployment + - attributes: + custom_tags: + - language:go + - department:platform + env: production + finished_at: 1693492084000000000 + git: + commit_sha: 77bdc9350f2cc9b250b69abddab733dd55e1a599 + repository_url: https://github.com/organization/api-service + service: api-service + started_at: 1693492074000000000 + team: backend + version: v2.1.0 + id: 4242fcdd31586084 + type: dora_deployment properties: - attributes: - description: The attributes of the event. - type: object - id: - description: The ID of the event. - type: string - type: - description: The type of the event. - type: string + data: + description: The list of DORA deployment events. 
+ items: + $ref: '#/components/schemas/DORADeploymentObject' + type: array + type: object + DORAFailureFetchResponse: + description: Response for fetching a single failure event. + properties: + data: + $ref: '#/components/schemas/DORAIncidentObject' type: object DORAFailureRequest: description: Request to create a DORA failure event. @@ -16083,11 +16192,45 @@ components: type: string x-enum-varnames: - DORA_FAILURE - DORAFetchResponse: - description: Response for the DORA fetch endpoints. + DORAFailuresListResponse: + description: Response for the list failures endpoint. + example: + data: + - attributes: + custom_tags: + - incident_type:database + - department:engineering + env: production + finished_at: 1693492274000000000 + name: Database outage + services: + - shopist + severity: SEV-1 + started_at: 1693492174000000000 + team: backend + id: 4242fcdd31586085 + type: dora_incident + - attributes: + custom_tags: + - incident_type:service_down + - department:platform + env: production + finished_at: 1693492474000000000 + name: API service outage + services: + - api-service + - payment-service + severity: SEV-2 + started_at: 1693492374000000000 + team: backend + id: 4242fcdd31586086 + type: dora_incident properties: data: - $ref: '#/components/schemas/DORAEvent' + description: The list of DORA incident events. + items: + $ref: '#/components/schemas/DORAIncidentObject' + type: array type: object DORAGitInfo: description: Git info for DORA Metrics events. @@ -16100,6 +16243,82 @@ components: - repository_url - commit_sha type: object + DORAIncidentObject: + description: A DORA incident event. 
+ example: + attributes: + custom_tags: + - incident_type:database + - department:engineering + env: production + finished_at: 1693492274000000000 + git: + commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 + repository_url: https://github.com/organization/example-repository + name: Database outage + services: + - shopist + severity: SEV-1 + started_at: 1693492174000000000 + team: backend + id: 4242fcdd31586085 + type: dora_incident + properties: + attributes: + $ref: '#/components/schemas/DORAIncidentObjectAttributes' + id: + description: The ID of the incident event. + type: string + type: + $ref: '#/components/schemas/DORAFailureType' + type: object + DORAIncidentObjectAttributes: + description: The attributes of the incident event. + properties: + custom_tags: + $ref: '#/components/schemas/DORACustomTags' + env: + description: Environment name that was impacted by the incident. + example: production + type: string + finished_at: + description: Unix timestamp when the incident finished. + example: 1693491984000000000 + format: int64 + type: integer + git: + $ref: '#/components/schemas/DORAGitInfo' + name: + description: Incident name. + example: Database outage + type: string + services: + description: Service names impacted by the incident. + example: + - shopist + items: + type: string + type: array + severity: + description: Incident severity. + example: SEV-1 + type: string + started_at: + description: Unix timestamp when the incident started. + example: 1693491974000000000 + format: int64 + type: integer + team: + description: Name of the team owning the services impacted. + example: backend + type: string + version: + description: Version to correlate with APM Deployment Tracking. + example: v1.12.07 + type: string + required: + - started_at + type: object DORAListDeploymentsRequest: description: Request to get a list of deployments. 
example: @@ -16119,32 +16338,31 @@ components: type: object DORAListDeploymentsRequestAttributes: description: Attributes to get a list of deployments. - example: - from: '2025-01-01T00:00:00Z' - limit: 500 - query: service:(shopist OR api-service OR payment-service) env:(production - OR staging) team:(backend OR platform) - sort: -started_at - to: '2025-01-31T23:59:59Z' properties: from: description: Minimum timestamp for requested events. + example: '2025-01-01T00:00:00Z' format: date-time type: string limit: default: 10 description: Maximum number of events in the response. + example: 500 format: int32 maximum: 1000 type: integer query: description: Search query with event platform syntax. + example: service:(shopist OR api-service OR payment-service) env:(production + OR staging) team:(backend OR platform) type: string sort: description: Sort order (prefixed with `-` for descending). + example: -started_at type: string to: description: Maximum timestamp for requested events. + example: '2025-01-31T23:59:59Z' format: date-time type: string type: object @@ -16167,9 +16385,11 @@ components: - attributes type: object DORAListDeploymentsRequestDataType: + default: dora_deployments_list_request description: The definition of `DORAListDeploymentsRequestDataType` object. enum: - dora_deployments_list_request + example: dora_deployments_list_request type: string x-enum-varnames: - DORA_DEPLOYMENTS_LIST_REQUEST @@ -16192,32 +16412,31 @@ components: type: object DORAListFailuresRequestAttributes: description: Attributes to get a list of failures. - example: - from: '2025-01-01T00:00:00Z' - limit: 500 - query: severity:(SEV-1 OR SEV-2) env:(production OR staging) service:(shopist - OR api-service OR payment-service) team:(backend OR platform OR payments) - sort: -started_at - to: '2025-01-31T23:59:59Z' properties: from: description: Minimum timestamp for requested events. 
+ example: '2025-01-01T00:00:00Z' format: date-time type: string limit: default: 10 description: Maximum number of events in the response. + example: 500 format: int32 maximum: 1000 type: integer query: description: Search query with event platform syntax. + example: severity:(SEV-1 OR SEV-2) env:(production OR staging) service:(shopist + OR api-service OR payment-service) team:(backend OR platform OR payments) type: string sort: description: Sort order (prefixed with `-` for descending). + example: -started_at type: string to: description: Maximum timestamp for requested events. + example: '2025-01-31T23:59:59Z' format: date-time type: string type: object @@ -16240,54 +16459,14 @@ components: - attributes type: object DORAListFailuresRequestDataType: + default: dora_failures_list_request description: The definition of `DORAListFailuresRequestDataType` object. enum: - dora_failures_list_request + example: dora_failures_list_request type: string x-enum-varnames: - DORA_FAILURES_LIST_REQUEST - DORAListResponse: - description: Response for the DORA list endpoints. - example: - data: - - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: production - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - service: shopist - started_at: 1693491974000000000 - team: backend - version: v1.12.07 - id: 4242fcdd31586083 - type: dora_deployment - - attributes: - custom_tags: - - language:go - - department:platform - env: production - finished_at: 1693492084000000000 - git: - commit_sha: 77bdc9350f2cc9b250b69abddab733dd55e1a599 - repository_url: https://github.com/organization/api-service - service: api-service - started_at: 1693492074000000000 - team: backend - version: v2.1.0 - id: 4242fcdd31586084 - type: dora_deployment - properties: - data: - description: The list of DORA events. 
- items: - $ref: '#/components/schemas/DORAEvent' - type: array - type: object DashboardListAddItemsRequest: description: Request containing a list of dashboards to add. properties: @@ -33334,6 +33513,8 @@ components: description: The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' client_id: description: Azure AD client ID used for authentication. example: a1b2c3d4-5678-90ab-cdef-1234567890ab @@ -35211,6 +35392,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -35287,6 +35470,8 @@ components: description: S3 bucket name. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: amazon-s3-destination @@ -35398,6 +35583,8 @@ components: description: Name of the Amazon S3 bucket in Security Lake (3-63 characters). example: security-lake-bucket type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' custom_source_name: description: Custom source name for the logs in Security Lake. example: my-custom-source @@ -35455,6 +35642,39 @@ components: role session. type: string type: object + ObservabilityPipelineBufferOptions: + description: Configuration for buffer settings on destination components. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions' + ObservabilityPipelineBufferOptionsDiskType: + default: disk + description: The type of the buffer that will be configured, a disk buffer. 
+ enum: + - disk + type: string + x-enum-varnames: + - DISK + ObservabilityPipelineBufferOptionsMemoryType: + default: memory + description: The type of the buffer that will be configured, a memory buffer. + enum: + - memory + type: string + x-enum-varnames: + - MEMORY + ObservabilityPipelineBufferOptionsWhenFull: + default: block + description: Behavior when the buffer is full (block and stop accepting new + events, or drop new events) + enum: + - block + - drop_newest + type: string + x-enum-varnames: + - BLOCK + - DROP_NEWEST ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35641,6 +35861,8 @@ components: description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' encoding: @@ -35849,6 +36071,8 @@ components: ObservabilityPipelineDatadogLogsDestination: description: The `datadog_logs` destination forwards logs to Datadog Log Management. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: datadog-logs-destination @@ -36020,12 +36244,27 @@ components: type: string x-enum-varnames: - DEDUPE + ObservabilityPipelineDiskBufferOptions: + description: Options for configuring a disk buffer. + properties: + max_size: + description: Maximum size of the disk buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType' + when_full: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsWhenFull' + type: object ObservabilityPipelineElasticsearchDestination: description: The `elasticsearch` destination writes logs to an Elasticsearch cluster. 
       properties:
         api_version:
           $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion'
+        buffer:
+          $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
         bulk_index:
           description: The index to write logs to in Elasticsearch.
           example: logs-index
@@ -36496,6 +36735,8 @@ components:
       properties:
         auth:
           $ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
+        buffer:
+          $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
         customer_id:
           description: The Google Chronicle customer ID.
           example: abcdefg123456789
@@ -36559,6 +36800,8 @@ components:
           description: Name of the GCS bucket.
           example: error-logs
           type: string
+        buffer:
+          $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
         id:
           description: Unique identifier for the destination component.
           example: gcs-destination
@@ -36638,6 +36881,8 @@ components:
       properties:
         auth:
           $ref: '#/components/schemas/ObservabilityPipelineGcpAuth'
+        buffer:
+          $ref: '#/components/schemas/ObservabilityPipelineBufferOptions'
         encoding:
           $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestinationEncoding'
         id:
@@ -36925,6 +37170,28 @@ components:
       type: string
     x-enum-varnames:
     - LOGSTASH
+    ObservabilityPipelineMemoryBufferOptions:
+      description: Options for configuring a memory buffer by byte size.
+      properties:
+        max_size:
+          description: Maximum size of the memory buffer.
+          example: 4096
+          format: int64
+          type: integer
+        type:
+          $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
+      type: object
+    ObservabilityPipelineMemoryBufferSizeOptions:
+      description: Options for configuring a memory buffer by queue length.
+      properties:
+        max_events:
+          description: Maximum events for the memory buffer.
+          example: 500
+          format: int64
+          type: integer
+        type:
+          $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType'
+      type: object
     ObservabilityPipelineMetadataEntry:
       description: A custom metadata entry.
properties: @@ -36948,6 +37215,8 @@ components: ObservabilityPipelineNewRelicDestination: description: The `new_relic` destination sends logs to the New Relic platform. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: new-relic-destination @@ -37084,6 +37353,8 @@ components: ObservabilityPipelineOpenSearchDestination: description: The `opensearch` destination writes logs to an OpenSearch cluster. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -37635,6 +37906,8 @@ components: description: The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: rsyslog-destination @@ -38105,6 +38378,8 @@ components: ObservabilityPipelineSentinelOneDestination: description: The `sentinel_one` destination sends logs to SentinelOne. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: sentinelone-destination @@ -38154,6 +38429,8 @@ components: description: The `socket` destination sends logs over TCP or UDP to a remote server. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' framing: @@ -38453,6 +38730,8 @@ components: If `false`, Splunk assigns the time the event was received.' 
example: true type: boolean + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: @@ -38562,6 +38841,8 @@ components: ObservabilityPipelineSumoLogicDestination: description: The `sumo_logic` destination forwards logs to Sumo Logic. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' header_custom_fields: @@ -38665,6 +38946,8 @@ components: description: The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: syslog-ng-destination @@ -67820,7 +68103,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/DORAListResponse' + $ref: '#/components/schemas/DORADeploymentsListResponse' description: OK '400': content: @@ -67858,26 +68141,8 @@ paths: '200': content: application/json: - example: - data: - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: staging - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - service: shopist - started_at: 1693491974000000000 - team: backend - version: v1.12.07 - id: 4242fcdd31586083 - type: dora_deployment - schema: - $ref: '#/components/schemas/DORAFetchResponse' + schema: + $ref: '#/components/schemas/DORADeploymentFetchResponse' description: OK '400': content: @@ -67994,66 +68259,8 @@ paths: '200': content: application/json: - example: - data: - - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: production - finished_at: 1693491984000000000 - git: - 
commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - name: Web server is down; all requests are failing. - services: - - shopist - severity: SEV-1 - started_at: 1693491974000000000 - team: backend - id: 4242fcdd31586085 - type: dora_failure - - attributes: - custom_tags: - - language:go - - department:platform - env: production - finished_at: 1693492084000000000 - git: - commit_sha: 77bdc9350f2cc9b250b69abddab733dd55e1a599 - repository_url: https://github.com/organization/api-service - name: Database connection timeout - services: - - api-service - - payment-service - severity: SEV-1 - started_at: 1693492074000000000 - team: platform - version: v2.1.0 - id: 4242fcdd31586086 - type: dora_failure - - attributes: - custom_tags: - - language:python - - department:payments - - region:eu-west-1 - env: staging - finished_at: 1693492204000000000 - git: - commit_sha: 99edc9350f2cc9b250b69abddab733dd55e1a601 - repository_url: https://github.com/organization/payment-service - name: Payment gateway API rate limit exceeded - services: - - payment-service - severity: SEV-2 - started_at: 1693492174000000000 - team: payments - version: v1.8.3 - id: 4242fcdd31586087 - type: dora_failure - schema: - $ref: '#/components/schemas/DORAListResponse' + schema: + $ref: '#/components/schemas/DORAFailuresListResponse' description: OK '400': content: @@ -68091,28 +68298,8 @@ paths: '200': content: application/json: - example: - data: - attributes: - custom_tags: - - language:java - - department:engineering - - region:us-east-1 - env: staging - finished_at: 1693491984000000000 - git: - commit_sha: 66adc9350f2cc9b250b69abddab733dd55e1a588 - repository_url: https://github.com/organization/example-repository - name: Web server is down; all requests are failing. 
- services: - - shopist - severity: High - started_at: 1693491974000000000 - team: backend - id: 4242fcdd31586085 - type: dora_failure - schema: - $ref: '#/components/schemas/DORAFetchResponse' + schema: + $ref: '#/components/schemas/DORAFailureFetchResponse' description: OK '400': content: diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen index 73133f1c75e9..b835c39be389 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:15.575Z \ No newline at end of file +2026-01-06T13:50:00.603Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml index 868bbb7746b9..9939f72c5ae3 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:15 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:00 GMT request: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen index 20165353d5c2..f99b046bd97f 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen @@ -1 +1 @@ 
-2025-12-18T16:15:16.062Z \ No newline at end of file +2026-01-06T13:50:01.032Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml index 67d4c2f969aa..6509dd81d2c1 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:16 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:01 GMT request: body: encoding: UTF-8 @@ -15,7 +15,7 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":{"id":"bd8d693c-dc2c-11f0-bf69-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"98cbafb2-eb06-11f0-b183-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -25,14 +25,14 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:16 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:01 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bd8d693c-dc2c-11f0-bf69-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/98cbafb2-eb06-11f0-b183-da7ad0900002 response: body: encoding: UTF-8 diff --git 
a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen index 85deda9192b8..1a1e0e6af8ae 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.165Z \ No newline at end of file +2026-01-06T13:50:02.108Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml index 5d29be3f71d4..a9af8d4297dc 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:02 GMT request: body: null headers: diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen index 201ee9bda87c..b0c0cd0458bd 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.716Z \ No newline at end of file +2026-01-06T13:50:02.599Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml index 9f60b32d4ad6..50db62888858 100644 --- 
a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:02 GMT request: body: encoding: UTF-8 @@ -17,7 +17,7 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":{"id":"be89fea4-dc2c-11f0-bdea-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"99bc2ab4-eb06-11f0-a7ff-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,14 +29,14 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:02 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/99bc2ab4-eb06-11f0-a7ff-da7ad0900002 response: body: encoding: UTF-8 @@ -47,14 +47,14 @@ http_interactions: status: code: 204 message: No Content -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:02 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: 
https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/99bc2ab4-eb06-11f0-a7ff-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen index 2da6d0e5c1dc..8a7dea973b49 100644 --- a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:20.018Z \ No newline at end of file +2026-01-06T13:50:04.791Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml index 2f3f18afc38f..eec74472d1ea 100644 --- a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:20 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:04 GMT request: body: encoding: UTF-8 @@ -17,7 +17,7 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9b06af3e-eb06-11f0-a801-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,18 +29,18 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:20 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:04 GMT request: body: null headers: Accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9b06af3e-eb06-11f0-a801-da7ad0900002 response: body: encoding: UTF-8 - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9b06af3e-eb06-11f0-a801-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -52,14 +52,14 @@ http_interactions: status: code: 200 message: OK -- recorded_at: Thu, 18 Dec 2025 16:15:20 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:04 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9b06af3e-eb06-11f0-a801-da7ad0900002 response: body: encoding: UTF-8 diff --git 
a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen index a1ae2640d7db..e5c3681b9ae8 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.038Z \ No newline at end of file +2026-01-06T13:50:06.623Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml index 597549ed76f8..145a22831ac9 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:06 GMT request: body: null headers: diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen index fb745af4ce5f..1e45abc563fb 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.507Z \ No newline at end of file +2026-01-06T13:50:07.036Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml index 7e35e7aa83a4..aaa4104848d3 100644 --- 
a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:07 GMT request: body: encoding: UTF-8 @@ -17,7 +17,7 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9c5e6732-eb06-11f0-a803-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,7 +29,7 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:07 GMT request: body: null headers: @@ -40,10 +40,26 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":[{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":[{"id":"4bf478ba-dc68-11f0-87e9-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"a78e416a-de66-11f0-a039-da7ad0900002","type":"pipelines","attributes":{"name":"http-server-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"auth_strategy":"plain","decoding":"json","id":"http-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/http.crt","key_file":"/etc/ssl/private/http.key"},"type":"http_server"}]}}},{"id":"a84fd58c-de66-11f0-a03b-da7ad0900002","type":"pipelines","attributes":{"name":"amazon_s3-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["s3-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"auth":{"assume_role":"arn:aws:iam::123456789012:role/test-role","external_id":"external-test-id","session_name":"session-test"},"id":"s3-source-1","region":"us-east-1","tls":{"ca_file":"/etc/ssl/certs/s3.ca","crt_file":"/etc/ssl/certs/s3.crt","key_file":"/etc/ssl/private/s3.key"},"type":"amazon_s3"}]}}},{"id":"a42e22e0-df49-11f0-81d5-da7ad0900002","type":"pipelines","attributes":{"name":"dedupe + 
pipeline","config":{"destinations":[{"id":"destination-1","inputs":["dedupe-group-2"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"dedupe-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":["log.message","log.tags"],"id":"dedupe-match","include":"*","mode":"match","type":"dedupe"}]},{"enabled":true,"id":"dedupe-group-2","include":"*","inputs":["dedupe-group-1"],"processors":[{"enabled":true,"fields":["log.source","log.context"],"id":"dedupe-ignore","include":"*","mode":"ignore","type":"dedupe"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"2cd3c342-e0c2-11f0-9d34-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-group-1"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"add-fields-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":[{"name":"custom.field","value":"hello-world"},{"name":"env","value":"prod"}],"id":"add-fields-1","include":"*","type":"add_fields"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"20f4849c-e579-11f0-af79-da7ad0900002","type":"pipelines","attributes":{"name":"fluent-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["fluent-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"fluent-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/fluent.crt","key_file":"/etc/ssl/private/fluent.key"},"type":"fluentd"}]}}},{"id":"15621afe-e669-11f0-bec3-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My - Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":1}} + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"dfbeb25a-e6c1-11f0-9bc1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"923fbdb6-e771-11f0-9388-da7ad0900002","type":"pipelines","attributes":{"name":"http-client + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"auth_strategy":"basic","decoding":"json","id":"http-source-1","scrape_interval_secs":60,"scrape_timeout_secs":10,"tls":{"crt_file":"/path/to/http.crt"},"type":"http_client"}]}}},{"id":"a7b600ce-e771-11f0-939c-da7ad0900002","type":"pipelines","attributes":{"name":"newrelic + 
pipeline","config":{"destinations":[{"id":"destination-1","inputs":["source-1"],"region":"us","type":"new_relic"}],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"306bab4c-e904-11f0-aa8a-da7ad0900002","type":"pipelines","attributes":{"name":"splunk-hec-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["splunk-hec-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"splunk-hec-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/splunk.crt","key_file":"/etc/ssl/private/splunk.key"},"type":"splunk_hec"}]}}},{"id":"51faefca-e922-11f0-a260-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"8d025dea-ea96-11f0-8a79-da7ad0900002","type":"pipelines","attributes":{"name":"crowdstrike-next-gen-siem-destination-pipeline-basic","config":{"destinations":[{"encoding":"raw_message","id":"crowdstrike-dest-basic-1","inputs":["source-1"],"type":"crowdstrike_next_gen_siem"}],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"ed4d493e-eabf-11f0-852d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"9c5e6732-eb06-11f0-a803-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":15}} ' headers: @@ -52,14 +68,14 @@ http_interactions: status: code: 200 message: OK -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:07 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c162e83e-dc2c-11f0-bf6b-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9c5e6732-eb06-11f0-a803-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen index 92a0e9377f04..ef250224cdb4 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:24.455Z \ No newline at end of file +2026-01-06T13:50:09.046Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml 
b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml index ac916bfc6dd7..89354b6529a7 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:24 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:09 GMT request: body: encoding: UTF-8 @@ -17,7 +17,7 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":{"id":"c28a5ad0-dc2c-11f0-bdee-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9d8eebe0-eb06-11f0-b185-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,7 +29,7 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:24 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:09 GMT request: body: encoding: UTF-8 @@ -41,7 +41,7 @@ http_interactions: Content-Type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9d8eebe0-eb06-11f0-b185-da7ad0900002 response: body: encoding: UTF-8 @@ -58,14 +58,14 @@ http_interactions: status: code: 400 message: Bad Request -- recorded_at: Thu, 18 Dec 2025 16:15:24 GMT +- recorded_at: Tue, 06 Jan 
2026 13:50:09 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9d8eebe0-eb06-11f0-b185-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen index b083fd34ae0b..cea63a87769b 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.411Z \ No newline at end of file +2026-01-06T13:50:10.811Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml index e7c88d0aad7f..c3b646ab37eb 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:10 GMT request: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen index b9cbd5fcac4f..e9d2e85c6bf9 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen @@ -1 +1 @@ 
-2025-12-18T16:15:26.929Z \ No newline at end of file +2026-01-06T13:50:11.272Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml index 3f765505ace2..03ec072f40c5 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:11 GMT request: body: encoding: UTF-8 @@ -17,7 +17,7 @@ http_interactions: response: body: encoding: UTF-8 - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"9ee260d0-eb06-11f0-b187-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,7 +29,7 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:11 GMT request: body: encoding: UTF-8 @@ -41,11 +41,11 @@ http_interactions: Content-Type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9ee260d0-eb06-11f0-b187-da7ad0900002 
response: body: encoding: UTF-8 - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Updated + string: '{"data":{"id":"9ee260d0-eb06-11f0-b187-da7ad0900002","type":"pipelines","attributes":{"name":"Updated Pipeline Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -55,14 +55,14 @@ http_interactions: status: code: 200 message: OK -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:11 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/9ee260d0-eb06-11f0-b187-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen index 745856e49a6d..78ec48334093 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.179Z \ No newline at end of file +2026-01-06T13:50:14.036Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml 
b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml index 4db7a093b5c7..a88e77645d59 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:29 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:14 GMT request: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen index ecfcc54162aa..8b15ab7629c1 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.647Z \ No newline at end of file +2026-01-06T13:50:14.439Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml index 28bb324a6496..34f2caa0a7bd 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml @@ -1,5 +1,5 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:29 GMT +- recorded_at: Tue, 06 Jan 2026 13:50:14 GMT request: body: encoding: UTF-8 diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 022391df7878..3dd2b8e5c3ee 100644 --- a/lib/datadog_api_client/inflector.rb +++ 
b/lib/datadog_api_client/inflector.rb @@ -1985,21 +1985,27 @@ def overrides "v2.domain_allowlist_response_data" => "DomainAllowlistResponseData", "v2.domain_allowlist_response_data_attributes" => "DomainAllowlistResponseDataAttributes", "v2.domain_allowlist_type" => "DomainAllowlistType", + "v2.dora_deployment_fetch_response" => "DORADeploymentFetchResponse", + "v2.dora_deployment_object" => "DORADeploymentObject", + "v2.dora_deployment_object_attributes" => "DORADeploymentObjectAttributes", "v2.dora_deployment_request" => "DORADeploymentRequest", "v2.dora_deployment_request_attributes" => "DORADeploymentRequestAttributes", "v2.dora_deployment_request_data" => "DORADeploymentRequestData", "v2.dora_deployment_response" => "DORADeploymentResponse", "v2.dora_deployment_response_data" => "DORADeploymentResponseData", + "v2.dora_deployments_list_response" => "DORADeploymentsListResponse", "v2.dora_deployment_type" => "DORADeploymentType", - "v2.dora_event" => "DORAEvent", + "v2.dora_failure_fetch_response" => "DORAFailureFetchResponse", "v2.dora_failure_request" => "DORAFailureRequest", "v2.dora_failure_request_attributes" => "DORAFailureRequestAttributes", "v2.dora_failure_request_data" => "DORAFailureRequestData", "v2.dora_failure_response" => "DORAFailureResponse", "v2.dora_failure_response_data" => "DORAFailureResponseData", + "v2.dora_failures_list_response" => "DORAFailuresListResponse", "v2.dora_failure_type" => "DORAFailureType", - "v2.dora_fetch_response" => "DORAFetchResponse", "v2.dora_git_info" => "DORAGitInfo", + "v2.dora_incident_object" => "DORAIncidentObject", + "v2.dora_incident_object_attributes" => "DORAIncidentObjectAttributes", "v2.dora_list_deployments_request" => "DORAListDeploymentsRequest", "v2.dora_list_deployments_request_attributes" => "DORAListDeploymentsRequestAttributes", "v2.dora_list_deployments_request_data" => "DORAListDeploymentsRequestData", @@ -2008,7 +2014,6 @@ def overrides "v2.dora_list_failures_request_attributes" => 
"DORAListFailuresRequestAttributes", "v2.dora_list_failures_request_data" => "DORAListFailuresRequestData", "v2.dora_list_failures_request_data_type" => "DORAListFailuresRequestDataType", - "v2.dora_list_response" => "DORAListResponse", "v2.downtime_create_request" => "DowntimeCreateRequest", "v2.downtime_create_request_attributes" => "DowntimeCreateRequestAttributes", "v2.downtime_create_request_data" => "DowntimeCreateRequestData", @@ -3153,6 +3158,10 @@ def overrides "v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination", "v2.observability_pipeline_amazon_security_lake_destination_type" => "ObservabilityPipelineAmazonSecurityLakeDestinationType", "v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth", + "v2.observability_pipeline_buffer_options" => "ObservabilityPipelineBufferOptions", + "v2.observability_pipeline_buffer_options_disk_type" => "ObservabilityPipelineBufferOptionsDiskType", + "v2.observability_pipeline_buffer_options_memory_type" => "ObservabilityPipelineBufferOptionsMemoryType", + "v2.observability_pipeline_buffer_options_when_full" => "ObservabilityPipelineBufferOptionsWhenFull", "v2.observability_pipeline_config" => "ObservabilityPipelineConfig", "v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem", "v2.observability_pipeline_config_processor_group" => "ObservabilityPipelineConfigProcessorGroup", @@ -3180,6 +3189,7 @@ def overrides "v2.observability_pipeline_dedupe_processor" => "ObservabilityPipelineDedupeProcessor", "v2.observability_pipeline_dedupe_processor_mode" => "ObservabilityPipelineDedupeProcessorMode", "v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType", + "v2.observability_pipeline_disk_buffer_options" => "ObservabilityPipelineDiskBufferOptions", "v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination", 
"v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion", "v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType", @@ -3233,6 +3243,8 @@ def overrides "v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType", "v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource", "v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType", + "v2.observability_pipeline_memory_buffer_options" => "ObservabilityPipelineMemoryBufferOptions", + "v2.observability_pipeline_memory_buffer_size_options" => "ObservabilityPipelineMemoryBufferSizeOptions", "v2.observability_pipeline_metadata_entry" => "ObservabilityPipelineMetadataEntry", "v2.observability_pipeline_metric_value" => "ObservabilityPipelineMetricValue", "v2.observability_pipeline_new_relic_destination" => "ObservabilityPipelineNewRelicDestination", diff --git a/lib/datadog_api_client/v2/api/dora_metrics_api.rb b/lib/datadog_api_client/v2/api/dora_metrics_api.rb index 6f96bce600cb..7ca71f17eae0 100644 --- a/lib/datadog_api_client/v2/api/dora_metrics_api.rb +++ b/lib/datadog_api_client/v2/api/dora_metrics_api.rb @@ -386,7 +386,7 @@ def get_dora_deployment(deployment_id, opts = {}) # # @param deployment_id [String] The ID of the deployment event. 
# @param opts [Hash] the optional parameters - # @return [Array<(DORAFetchResponse, Integer, Hash)>] DORAFetchResponse data, response status code and response headers + # @return [Array<(DORADeploymentFetchResponse, Integer, Hash)>] DORADeploymentFetchResponse data, response status code and response headers def get_dora_deployment_with_http_info(deployment_id, opts = {}) if @api_client.config.debugging @@ -414,7 +414,7 @@ def get_dora_deployment_with_http_info(deployment_id, opts = {}) post_body = opts[:debug_body] # return_type - return_type = opts[:debug_return_type] || 'DORAFetchResponse' + return_type = opts[:debug_return_type] || 'DORADeploymentFetchResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] @@ -451,7 +451,7 @@ def get_dora_failure(failure_id, opts = {}) # # @param failure_id [String] The ID of the failure event. # @param opts [Hash] the optional parameters - # @return [Array<(DORAFetchResponse, Integer, Hash)>] DORAFetchResponse data, response status code and response headers + # @return [Array<(DORAFailureFetchResponse, Integer, Hash)>] DORAFailureFetchResponse data, response status code and response headers def get_dora_failure_with_http_info(failure_id, opts = {}) if @api_client.config.debugging @@ -479,7 +479,7 @@ def get_dora_failure_with_http_info(failure_id, opts = {}) post_body = opts[:debug_body] # return_type - return_type = opts[:debug_return_type] || 'DORAFetchResponse' + return_type = opts[:debug_return_type] || 'DORAFailureFetchResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] @@ -516,7 +516,7 @@ def list_dora_deployments(body, opts = {}) # # @param body [DORAListDeploymentsRequest] # @param opts [Hash] the optional parameters - # @return [Array<(DORAListResponse, Integer, Hash)>] DORAListResponse data, response status code and response headers + # @return [Array<(DORADeploymentsListResponse, Integer, Hash)>] DORADeploymentsListResponse data, response status 
code and response headers def list_dora_deployments_with_http_info(body, opts = {}) if @api_client.config.debugging @@ -546,7 +546,7 @@ def list_dora_deployments_with_http_info(body, opts = {}) post_body = opts[:debug_body] || @api_client.object_to_http_body(body) # return_type - return_type = opts[:debug_return_type] || 'DORAListResponse' + return_type = opts[:debug_return_type] || 'DORADeploymentsListResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] @@ -583,7 +583,7 @@ def list_dora_failures(body, opts = {}) # # @param body [DORAListFailuresRequest] # @param opts [Hash] the optional parameters - # @return [Array<(DORAListResponse, Integer, Hash)>] DORAListResponse data, response status code and response headers + # @return [Array<(DORAFailuresListResponse, Integer, Hash)>] DORAFailuresListResponse data, response status code and response headers def list_dora_failures_with_http_info(body, opts = {}) if @api_client.config.debugging @@ -613,7 +613,7 @@ def list_dora_failures_with_http_info(body, opts = {}) post_body = opts[:debug_body] || @api_client.object_to_http_body(body) # return_type - return_type = opts[:debug_return_type] || 'DORAListResponse' + return_type = opts[:debug_return_type] || 'DORAFailuresListResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] diff --git a/lib/datadog_api_client/v2/models/azure_storage_destination.rb b/lib/datadog_api_client/v2/models/azure_storage_destination.rb index ef29c2cade85..98477b8c88cf 100644 --- a/lib/datadog_api_client/v2/models/azure_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/azure_storage_destination.rb @@ -24,6 +24,9 @@ class AzureStorageDestination # Optional prefix for blobs written to the container. attr_accessor :blob_prefix + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The name of the Azure Blob Storage container to store logs in. 
attr_reader :container_name @@ -43,6 +46,7 @@ class AzureStorageDestination def self.attribute_map { :'blob_prefix' => :'blob_prefix', + :'buffer' => :'buffer', :'container_name' => :'container_name', :'id' => :'id', :'inputs' => :'inputs', @@ -55,6 +59,7 @@ def self.attribute_map def self.openapi_types { :'blob_prefix' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'container_name' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -84,6 +89,10 @@ def initialize(attributes = {}) self.blob_prefix = attributes[:'blob_prefix'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'container_name') self.container_name = attributes[:'container_name'] end @@ -181,6 +190,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && blob_prefix == o.blob_prefix && + buffer == o.buffer && container_name == o.container_name && id == o.id && inputs == o.inputs && @@ -192,7 +202,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [blob_prefix, container_name, id, inputs, type, additional_properties].hash + [blob_prefix, buffer, container_name, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/dora_deployment_fetch_response.rb b/lib/datadog_api_client/v2/models/dora_deployment_fetch_response.rb new file mode 100644 index 000000000000..ed9c4746fa13 --- /dev/null +++ b/lib/datadog_api_client/v2/models/dora_deployment_fetch_response.rb @@ -0,0 +1,105 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Response for fetching a single deployment event. + class DORADeploymentFetchResponse + include BaseGenericModel + + # A DORA deployment event. + attr_accessor :data + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'data' => :'data' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'data' => :'DORADeploymentObject' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORADeploymentFetchResponse` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'data') + self.data = attributes[:'data'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + data == o.data && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [data, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/dora_deployment_object.rb b/lib/datadog_api_client/v2/models/dora_deployment_object.rb new file mode 100644 index 000000000000..1cd916ae4ec5 --- /dev/null +++ b/lib/datadog_api_client/v2/models/dora_deployment_object.rb @@ -0,0 +1,125 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # A DORA deployment event. + class DORADeploymentObject + include BaseGenericModel + + # The attributes of the deployment event. + attr_accessor :attributes + + # The ID of the deployment event. + attr_accessor :id + + # JSON:API type for DORA deployment events. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'attributes' => :'attributes', + :'id' => :'id', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'attributes' => :'DORADeploymentObjectAttributes', + :'id' => :'String', + :'type' => :'DORADeploymentType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORADeploymentObject` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'attributes') + self.attributes = attributes[:'attributes'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + attributes == o.attributes && + id == o.id && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [attributes, id, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/dora_deployment_object_attributes.rb b/lib/datadog_api_client/v2/models/dora_deployment_object_attributes.rb new file mode 100644 index 000000000000..4f9e8f9d2e34 --- /dev/null +++ b/lib/datadog_api_client/v2/models/dora_deployment_object_attributes.rb @@ -0,0 +1,225 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The attributes of the deployment event. + class DORADeploymentObjectAttributes + include BaseGenericModel + + # A list of user-defined tags. The tags must follow the `key:value` pattern. Up to 100 may be added per event. + attr_accessor :custom_tags + + # Environment name to where the service was deployed. + attr_accessor :env + + # Unix timestamp when the deployment finished. + attr_reader :finished_at + + # Git info for DORA Metrics events. + attr_accessor :git + + # Service name. + attr_reader :service + + # Unix timestamp when the deployment started. + attr_reader :started_at + + # Name of the team owning the deployed service. + attr_accessor :team + + # Version to correlate with APM Deployment Tracking. + attr_accessor :version + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
+ # @!visibility private + def self.attribute_map + { + :'custom_tags' => :'custom_tags', + :'env' => :'env', + :'finished_at' => :'finished_at', + :'git' => :'git', + :'service' => :'service', + :'started_at' => :'started_at', + :'team' => :'team', + :'version' => :'version' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'custom_tags' => :'Array', + :'env' => :'String', + :'finished_at' => :'Integer', + :'git' => :'DORAGitInfo', + :'service' => :'String', + :'started_at' => :'Integer', + :'team' => :'String', + :'version' => :'String' + } + end + + # List of attributes with nullable: true + # @!visibility private + def self.openapi_nullable + Set.new([ + :'custom_tags', + ]) + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORADeploymentObjectAttributes` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'custom_tags') + if (value = attributes[:'custom_tags']).is_a?(Array) + self.custom_tags = value + end + end + + if attributes.key?(:'env') + self.env = attributes[:'env'] + end + + if attributes.key?(:'finished_at') + self.finished_at = attributes[:'finished_at'] + end + + if attributes.key?(:'git') + self.git = attributes[:'git'] + end + + if attributes.key?(:'service') + self.service = attributes[:'service'] + end + + if attributes.key?(:'started_at') + self.started_at = attributes[:'started_at'] + end + + if attributes.key?(:'team') + self.team = attributes[:'team'] + end + + if 
attributes.key?(:'version') + self.version = attributes[:'version'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @finished_at.nil? + return false if @service.nil? + return false if @started_at.nil? + true + end + + # Custom attribute writer method with validation + # @param finished_at [Object] Object to be assigned + # @!visibility private + def finished_at=(finished_at) + if finished_at.nil? + fail ArgumentError, 'invalid value for "finished_at", finished_at cannot be nil.' + end + @finished_at = finished_at + end + + # Custom attribute writer method with validation + # @param service [Object] Object to be assigned + # @!visibility private + def service=(service) + if service.nil? + fail ArgumentError, 'invalid value for "service", service cannot be nil.' + end + @service = service + end + + # Custom attribute writer method with validation + # @param started_at [Object] Object to be assigned + # @!visibility private + def started_at=(started_at) + if started_at.nil? + fail ArgumentError, 'invalid value for "started_at", started_at cannot be nil.' + end + @started_at = started_at + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + custom_tags == o.custom_tags && + env == o.env && + finished_at == o.finished_at && + git == o.git && + service == o.service && + started_at == o.started_at && + team == o.team && + version == o.version && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [custom_tags, env, finished_at, git, service, started_at, team, version, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/dora_deployments_list_response.rb b/lib/datadog_api_client/v2/models/dora_deployments_list_response.rb new file mode 100644 index 000000000000..785902c3d546 --- /dev/null +++ b/lib/datadog_api_client/v2/models/dora_deployments_list_response.rb @@ -0,0 +1,107 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Response for the list deployments endpoint. + class DORADeploymentsListResponse + include BaseGenericModel + + # The list of DORA deployment events. + attr_accessor :data + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'data' => :'data' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'data' => :'Array' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORADeploymentsListResponse` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'data') + if (value = attributes[:'data']).is_a?(Array) + self.data = value + end + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + data == o.data && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [data, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/dora_fetch_response.rb b/lib/datadog_api_client/v2/models/dora_failure_fetch_response.rb similarity index 92% rename from lib/datadog_api_client/v2/models/dora_fetch_response.rb rename to lib/datadog_api_client/v2/models/dora_failure_fetch_response.rb index 72f97bd00e9f..5fb7e9a1693c 100644 --- a/lib/datadog_api_client/v2/models/dora_fetch_response.rb +++ b/lib/datadog_api_client/v2/models/dora_failure_fetch_response.rb @@ -17,11 +17,11 @@ require 'time' module DatadogAPIClient::V2 - # Response for the DORA fetch endpoints. - class DORAFetchResponse + # Response for fetching a single failure event. + class DORAFailureFetchResponse include BaseGenericModel - # A DORA event. + # A DORA incident event. attr_accessor :data attr_accessor :additional_properties @@ -38,7 +38,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { - :'data' => :'DORAEvent' + :'data' => :'DORAIncidentObject' } end @@ -47,7 +47,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAFetchResponse` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAFailureFetchResponse` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/dora_list_response.rb b/lib/datadog_api_client/v2/models/dora_failures_list_response.rb similarity index 91% rename from lib/datadog_api_client/v2/models/dora_list_response.rb rename to lib/datadog_api_client/v2/models/dora_failures_list_response.rb index e4ae63e25eda..dba0b4db3ca0 100644 --- a/lib/datadog_api_client/v2/models/dora_list_response.rb +++ b/lib/datadog_api_client/v2/models/dora_failures_list_response.rb @@ 
-17,11 +17,11 @@ require 'time' module DatadogAPIClient::V2 - # Response for the DORA list endpoints. - class DORAListResponse + # Response for the list failures endpoint. + class DORAFailuresListResponse include BaseGenericModel - # The list of DORA events. + # The list of DORA incident events. attr_accessor :data attr_accessor :additional_properties @@ -38,7 +38,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { - :'data' => :'Array' + :'data' => :'Array' } end @@ -47,7 +47,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAListResponse` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAFailuresListResponse` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/dora_event.rb b/lib/datadog_api_client/v2/models/dora_incident_object.rb similarity index 90% rename from lib/datadog_api_client/v2/models/dora_event.rb rename to lib/datadog_api_client/v2/models/dora_incident_object.rb index aa3b5d53c047..a3ab762d30c2 100644 --- a/lib/datadog_api_client/v2/models/dora_event.rb +++ b/lib/datadog_api_client/v2/models/dora_incident_object.rb @@ -17,17 +17,17 @@ require 'time' module DatadogAPIClient::V2 - # A DORA event. - class DORAEvent + # A DORA incident event. + class DORAIncidentObject include BaseGenericModel - # The attributes of the event. + # The attributes of the incident event. attr_accessor :attributes - # The ID of the event. + # The ID of the incident event. attr_accessor :id - # The type of the event. + # JSON:API type for DORA failure events. 
attr_accessor :type attr_accessor :additional_properties @@ -46,9 +46,9 @@ def self.attribute_map # @!visibility private def self.openapi_types { - :'attributes' => :'Object', + :'attributes' => :'DORAIncidentObjectAttributes', :'id' => :'String', - :'type' => :'String' + :'type' => :'DORAFailureType' } end @@ -57,7 +57,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAEvent` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAIncidentObject` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/dora_incident_object_attributes.rb b/lib/datadog_api_client/v2/models/dora_incident_object_attributes.rb new file mode 100644 index 000000000000..9a3294c34d22 --- /dev/null +++ b/lib/datadog_api_client/v2/models/dora_incident_object_attributes.rb @@ -0,0 +1,225 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The attributes of the incident event. + class DORAIncidentObjectAttributes + include BaseGenericModel + + # A list of user-defined tags. The tags must follow the `key:value` pattern. Up to 100 may be added per event. + attr_accessor :custom_tags + + # Environment name that was impacted by the incident. + attr_accessor :env + + # Unix timestamp when the incident finished. 
+ attr_accessor :finished_at + + # Git info for DORA Metrics events. + attr_accessor :git + + # Incident name. + attr_accessor :name + + # Service names impacted by the incident. + attr_accessor :services + + # Incident severity. + attr_accessor :severity + + # Unix timestamp when the incident started. + attr_reader :started_at + + # Name of the team owning the services impacted. + attr_accessor :team + + # Version to correlate with APM Deployment Tracking. + attr_accessor :version + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'custom_tags' => :'custom_tags', + :'env' => :'env', + :'finished_at' => :'finished_at', + :'git' => :'git', + :'name' => :'name', + :'services' => :'services', + :'severity' => :'severity', + :'started_at' => :'started_at', + :'team' => :'team', + :'version' => :'version' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'custom_tags' => :'Array', + :'env' => :'String', + :'finished_at' => :'Integer', + :'git' => :'DORAGitInfo', + :'name' => :'String', + :'services' => :'Array', + :'severity' => :'String', + :'started_at' => :'Integer', + :'team' => :'String', + :'version' => :'String' + } + end + + # List of attributes with nullable: true + # @!visibility private + def self.openapi_nullable + Set.new([ + :'custom_tags', + ]) + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::DORAIncidentObjectAttributes` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if 
(!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'custom_tags') + if (value = attributes[:'custom_tags']).is_a?(Array) + self.custom_tags = value + end + end + + if attributes.key?(:'env') + self.env = attributes[:'env'] + end + + if attributes.key?(:'finished_at') + self.finished_at = attributes[:'finished_at'] + end + + if attributes.key?(:'git') + self.git = attributes[:'git'] + end + + if attributes.key?(:'name') + self.name = attributes[:'name'] + end + + if attributes.key?(:'services') + if (value = attributes[:'services']).is_a?(Array) + self.services = value + end + end + + if attributes.key?(:'severity') + self.severity = attributes[:'severity'] + end + + if attributes.key?(:'started_at') + self.started_at = attributes[:'started_at'] + end + + if attributes.key?(:'team') + self.team = attributes[:'team'] + end + + if attributes.key?(:'version') + self.version = attributes[:'version'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @started_at.nil? + true + end + + # Custom attribute writer method with validation + # @param started_at [Object] Object to be assigned + # @!visibility private + def started_at=(started_at) + if started_at.nil? + fail ArgumentError, 'invalid value for "started_at", started_at cannot be nil.' + end + @started_at = started_at + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + custom_tags == o.custom_tags && + env == o.env && + finished_at == o.finished_at && + git == o.git && + name == o.name && + services == o.services && + severity == o.severity && + started_at == o.started_at && + team == o.team && + version == o.version && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [custom_tags, env, finished_at, git, name, services, severity, started_at, team, version, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb index 03266b733402..e598f24bf7f2 100644 --- a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb +++ b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class MicrosoftSentinelDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Azure AD client ID used for authentication. 
attr_reader :client_id @@ -48,6 +51,7 @@ class MicrosoftSentinelDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'client_id' => :'client_id', :'dcr_immutable_id' => :'dcr_immutable_id', :'id' => :'id', @@ -62,6 +66,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'client_id' => :'String', :'dcr_immutable_id' => :'String', :'id' => :'String', @@ -90,6 +95,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'client_id') self.client_id = attributes[:'client_id'] end @@ -231,6 +240,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && client_id == o.client_id && dcr_immutable_id == o.dcr_immutable_id && id == o.id && @@ -245,7 +255,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [client_id, dcr_immutable_id, id, inputs, table, tenant_id, type, additional_properties].hash + [buffer, client_id, dcr_immutable_id, id, inputs, table, tenant_id, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb index ab8721363e09..5c1e8cf6396d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb @@ -25,6 +25,9 @@ class ObservabilityPipelineAmazonOpenSearchDestination # The `strategy` field determines whether basic or AWS-based authentication is used. attr_reader :auth + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The index to write logs to. 
attr_accessor :bulk_index @@ -44,6 +47,7 @@ class ObservabilityPipelineAmazonOpenSearchDestination def self.attribute_map { :'auth' => :'auth', + :'buffer' => :'buffer', :'bulk_index' => :'bulk_index', :'id' => :'id', :'inputs' => :'inputs', @@ -56,6 +60,7 @@ def self.attribute_map def self.openapi_types { :'auth' => :'ObservabilityPipelineAmazonOpenSearchDestinationAuth', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'bulk_index' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -85,6 +90,10 @@ def initialize(attributes = {}) self.auth = attributes[:'auth'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'bulk_index') self.bulk_index = attributes[:'bulk_index'] end @@ -182,6 +191,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auth == o.auth && + buffer == o.buffer && bulk_index == o.bulk_index && id == o.id && inputs == o.inputs && @@ -193,7 +203,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, bulk_index, id, inputs, type, additional_properties].hash + [auth, buffer, bulk_index, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb index 6e74bafcd33b..86d276dd5531 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb @@ -28,6 +28,9 @@ class ObservabilityPipelineAmazonS3Destination # S3 bucket name. attr_reader :bucket + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Unique identifier for the destination component. 
attr_reader :id @@ -57,6 +60,7 @@ def self.attribute_map { :'auth' => :'auth', :'bucket' => :'bucket', + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'key_prefix' => :'key_prefix', @@ -73,6 +77,7 @@ def self.openapi_types { :'auth' => :'ObservabilityPipelineAwsAuth', :'bucket' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'key_prefix' => :'String', @@ -109,6 +114,10 @@ def initialize(attributes = {}) self.bucket = attributes[:'bucket'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -241,6 +250,7 @@ def ==(o) self.class == o.class && auth == o.auth && bucket == o.bucket && + buffer == o.buffer && id == o.id && inputs == o.inputs && key_prefix == o.key_prefix && @@ -255,7 +265,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, bucket, id, inputs, key_prefix, region, storage_class, tls, type, additional_properties].hash + [auth, bucket, buffer, id, inputs, key_prefix, region, storage_class, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb index 6904637a9723..dc48d7f929b7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb @@ -28,6 +28,9 @@ class ObservabilityPipelineAmazonSecurityLakeDestination # Name of the Amazon S3 bucket in Security Lake (3-63 characters). attr_reader :bucket + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Custom source name for the logs in Security Lake. 
attr_reader :custom_source_name @@ -54,6 +57,7 @@ def self.attribute_map { :'auth' => :'auth', :'bucket' => :'bucket', + :'buffer' => :'buffer', :'custom_source_name' => :'custom_source_name', :'id' => :'id', :'inputs' => :'inputs', @@ -69,6 +73,7 @@ def self.openapi_types { :'auth' => :'ObservabilityPipelineAwsAuth', :'bucket' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'custom_source_name' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -104,6 +109,10 @@ def initialize(attributes = {}) self.bucket = attributes[:'bucket'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'custom_source_name') self.custom_source_name = attributes[:'custom_source_name'] end @@ -232,6 +241,7 @@ def ==(o) self.class == o.class && auth == o.auth && bucket == o.bucket && + buffer == o.buffer && custom_source_name == o.custom_source_name && id == o.id && inputs == o.inputs && @@ -245,7 +255,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, bucket, custom_source_name, id, inputs, region, tls, type, additional_properties].hash + [auth, bucket, buffer, custom_source_name, id, inputs, region, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb new file mode 100644 index 000000000000..ef7445c56a0c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options.rb @@ -0,0 +1,64 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration for buffer settings on destination components. + module ObservabilityPipelineBufferOptions + class << self + include BaseOneOfModel + include BaseOneOfModelNoDiscriminator + + # List of class defined in oneOf (OpenAPI v3) + def openapi_one_of + [ + :'ObservabilityPipelineDiskBufferOptions', + :'ObservabilityPipelineMemoryBufferOptions', + :'ObservabilityPipelineMemoryBufferSizeOptions' + ] + end + # Builds the object + # @param data [Mixed] Data to be matched against the list of oneOf items + # @return [Object] Returns the model or the data itself + def build(data) + # Go through the list of oneOf items and attempt to identify the appropriate one. + # Note: + # - We do not attempt to check whether exactly one item matches. + # - No advanced validation of types in some cases (e.g. "x: { type: string }" will happily match { x: 123 }) + # due to the way the deserialization is made in the base_object template (it just casts without verifying). + # - TODO: scalar values are de facto behaving as if they were nullable. + # - TODO: logging when debugging is set. 
+ openapi_one_of.each do |klass| + begin + next if klass == :AnyType # "nullable: true" + typed_data = find_and_cast_into_type(klass, data) + next if typed_data.respond_to?(:_unparsed) && typed_data._unparsed + return typed_data if typed_data + rescue # rescue all errors so we keep iterating even if the current item lookup raises + end + end + + if openapi_one_of.include?(:AnyType) + data + else + self._unparsed = true + DatadogAPIClient::UnparsedObject.new(data) + end + end + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb new file mode 100644 index 000000000000..efe88cd7ce76 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_disk_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The type of the buffer that will be configured, a disk buffer. 
+ class ObservabilityPipelineBufferOptionsDiskType + include BaseEnumModel + + DISK = "disk".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb new file mode 100644 index 000000000000..e11826e973fc --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_memory_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The type of the buffer that will be configured, a memory buffer. + class ObservabilityPipelineBufferOptionsMemoryType + include BaseEnumModel + + MEMORY = "memory".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_when_full.rb b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_when_full.rb new file mode 100644 index 000000000000..38d6af94c8bf --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_buffer_options_when_full.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Behavior when the buffer is full (block and stop accepting new events, or drop new events) + class ObservabilityPipelineBufferOptionsWhenFull + include BaseEnumModel + + BLOCK = "block".freeze + DROP_NEWEST = "drop_newest".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb index 827d8cc8c390..a2f89abe4a0f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineCrowdStrikeNextGenSiemDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Compression configuration for log events. 
attr_accessor :compression @@ -45,6 +48,7 @@ class ObservabilityPipelineCrowdStrikeNextGenSiemDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'compression' => :'compression', :'encoding' => :'encoding', :'id' => :'id', @@ -58,6 +62,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'compression' => :'ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression', :'encoding' => :'ObservabilityPipelineCrowdStrikeNextGenSiemDestinationEncoding', :'id' => :'String', @@ -85,6 +90,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'compression') self.compression = attributes[:'compression'] end @@ -189,6 +198,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && compression == o.compression && encoding == o.encoding && id == o.id && @@ -202,7 +212,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [compression, encoding, id, inputs, tls, type, additional_properties].hash + [buffer, compression, encoding, id, inputs, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb index 3b08d1fd9c22..91ea7f0d63d2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineDatadogLogsDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -36,6 +39,7 @@ class ObservabilityPipelineDatadogLogsDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'type' => :'type' @@ -46,6 +50,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'type' => :'ObservabilityPipelineDatadogLogsDestinationType' @@ -70,6 +75,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -151,6 +160,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && type == o.type && @@ -161,7 +171,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, type, additional_properties].hash + [buffer, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb new file mode 100644 index 000000000000..44be529f2c88 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_disk_buffer_options.rb @@ -0,0 +1,125 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Options for configuring a disk buffer. 
+ class ObservabilityPipelineDiskBufferOptions + include BaseGenericModel + + # Maximum size of the disk buffer. + attr_accessor :max_size + + # The type of the buffer that will be configured, a disk buffer. + attr_accessor :type + + # Behavior when the buffer is full (block and stop accepting new events, or drop new events) + attr_accessor :when_full + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'max_size' => :'max_size', + :'type' => :'type', + :'when_full' => :'when_full' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'max_size' => :'Integer', + :'type' => :'ObservabilityPipelineBufferOptionsDiskType', + :'when_full' => :'ObservabilityPipelineBufferOptionsWhenFull' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineDiskBufferOptions` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'max_size') + self.max_size = attributes[:'max_size'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + + if attributes.key?(:'when_full') + self.when_full = attributes[:'when_full'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + max_size == o.max_size && + type == o.type && + when_full == o.when_full && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [max_size, type, when_full, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb index f49df443a047..678965a86003 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb @@ -24,6 +24,9 @@ class ObservabilityPipelineElasticsearchDestination # The Elasticsearch API version to use. Set to `auto` to auto-detect. attr_accessor :api_version + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The index to write logs to in Elasticsearch. 
attr_accessor :bulk_index @@ -43,6 +46,7 @@ class ObservabilityPipelineElasticsearchDestination def self.attribute_map { :'api_version' => :'api_version', + :'buffer' => :'buffer', :'bulk_index' => :'bulk_index', :'id' => :'id', :'inputs' => :'inputs', @@ -55,6 +59,7 @@ def self.attribute_map def self.openapi_types { :'api_version' => :'ObservabilityPipelineElasticsearchDestinationApiVersion', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'bulk_index' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -84,6 +89,10 @@ def initialize(attributes = {}) self.api_version = attributes[:'api_version'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'bulk_index') self.bulk_index = attributes[:'bulk_index'] end @@ -170,6 +179,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && api_version == o.api_version && + buffer == o.buffer && bulk_index == o.bulk_index && id == o.id && inputs == o.inputs && @@ -181,7 +191,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [api_version, bulk_index, id, inputs, type, additional_properties].hash + [api_version, buffer, bulk_index, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb index 73b6a5d47df9..76220f5624a0 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb @@ -24,6 +24,9 @@ class ObservabilityPipelineGoogleChronicleDestination # GCP credentials used to authenticate with Google Cloud Storage. attr_accessor :auth + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The Google Chronicle customer ID. 
attr_reader :customer_id @@ -49,6 +52,7 @@ class ObservabilityPipelineGoogleChronicleDestination def self.attribute_map { :'auth' => :'auth', + :'buffer' => :'buffer', :'customer_id' => :'customer_id', :'encoding' => :'encoding', :'id' => :'id', @@ -63,6 +67,7 @@ def self.attribute_map def self.openapi_types { :'auth' => :'ObservabilityPipelineGcpAuth', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'customer_id' => :'String', :'encoding' => :'ObservabilityPipelineGoogleChronicleDestinationEncoding', :'id' => :'String', @@ -94,6 +99,10 @@ def initialize(attributes = {}) self.auth = attributes[:'auth'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'customer_id') self.customer_id = attributes[:'customer_id'] end @@ -199,6 +208,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auth == o.auth && + buffer == o.buffer && customer_id == o.customer_id && encoding == o.encoding && id == o.id && @@ -212,7 +222,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, customer_id, encoding, id, inputs, log_type, type, additional_properties].hash + [auth, buffer, customer_id, encoding, id, inputs, log_type, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb index ae95f0729d98..7ae46462cf0d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb @@ -31,6 +31,9 @@ class ObservabilityPipelineGoogleCloudStorageDestination # Name of the GCS bucket. attr_reader :bucket + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Unique identifier for the destination component. 
attr_reader :id @@ -58,6 +61,7 @@ def self.attribute_map :'acl' => :'acl', :'auth' => :'auth', :'bucket' => :'bucket', + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'key_prefix' => :'key_prefix', @@ -74,6 +78,7 @@ def self.openapi_types :'acl' => :'ObservabilityPipelineGoogleCloudStorageDestinationAcl', :'auth' => :'ObservabilityPipelineGcpAuth', :'bucket' => :'String', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'key_prefix' => :'String', @@ -113,6 +118,10 @@ def initialize(attributes = {}) self.bucket = attributes[:'bucket'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -233,6 +242,7 @@ def ==(o) acl == o.acl && auth == o.auth && bucket == o.bucket && + buffer == o.buffer && id == o.id && inputs == o.inputs && key_prefix == o.key_prefix && @@ -246,7 +256,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [acl, auth, bucket, id, inputs, key_prefix, metadata, storage_class, type, additional_properties].hash + [acl, auth, bucket, buffer, id, inputs, key_prefix, metadata, storage_class, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb index fe396691e124..eee9118ff292 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb @@ -24,6 +24,9 @@ class ObservabilityPipelineGooglePubSubDestination # GCP credentials used to authenticate with Google Cloud Storage. attr_accessor :auth + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Encoding format for log events. 
attr_reader :encoding @@ -52,6 +55,7 @@ class ObservabilityPipelineGooglePubSubDestination def self.attribute_map { :'auth' => :'auth', + :'buffer' => :'buffer', :'encoding' => :'encoding', :'id' => :'id', :'inputs' => :'inputs', @@ -67,6 +71,7 @@ def self.attribute_map def self.openapi_types { :'auth' => :'ObservabilityPipelineGcpAuth', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'encoding' => :'ObservabilityPipelineGooglePubSubDestinationEncoding', :'id' => :'String', :'inputs' => :'Array', @@ -99,6 +104,10 @@ def initialize(attributes = {}) self.auth = attributes[:'auth'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'encoding') self.encoding = attributes[:'encoding'] end @@ -230,6 +239,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auth == o.auth && + buffer == o.buffer && encoding == o.encoding && id == o.id && inputs == o.inputs && @@ -244,7 +254,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auth, encoding, id, inputs, project, tls, topic, type, additional_properties].hash + [auth, buffer, encoding, id, inputs, project, tls, topic, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb new file mode 100644 index 000000000000..f5e41639de69 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_options.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Options for configuring a memory buffer by byte size. + class ObservabilityPipelineMemoryBufferOptions + include BaseGenericModel + + # Maximum size of the disk buffer. + attr_accessor :max_size + + # The type of the buffer that will be configured, a memory buffer. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'max_size' => :'max_size', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'max_size' => :'Integer', + :'type' => :'ObservabilityPipelineBufferOptionsMemoryType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMemoryBufferOptions` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'max_size') + self.max_size = attributes[:'max_size'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + max_size == o.max_size && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [max_size, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb new file mode 100644 index 000000000000..04f5e0b68aa2 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_memory_buffer_size_options.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Options for configuring a memory buffer by queue length. + class ObservabilityPipelineMemoryBufferSizeOptions + include BaseGenericModel + + # Maximum events for the memory buffer. + attr_accessor :max_events + + # The type of the buffer that will be configured, a memory buffer. 
+ attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'max_events' => :'max_events', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'max_events' => :'Integer', + :'type' => :'ObservabilityPipelineBufferOptionsMemoryType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMemoryBufferSizeOptions` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'max_events') + self.max_events = attributes[:'max_events'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + max_events == o.max_events && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [max_events, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb index d8705a24d962..eae6ee6e3d24 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineNewRelicDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -39,6 +42,7 @@ class ObservabilityPipelineNewRelicDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'region' => :'region', @@ -50,6 +54,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'region' => :'ObservabilityPipelineNewRelicDestinationRegion', @@ -75,6 +80,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -171,6 +180,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && region == o.region && @@ -182,7 +192,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, region, type, additional_properties].hash + [buffer, id, inputs, region, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb index 2120c2eef855..4c1adfbc8590 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineOpenSearchDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The index to write logs to. 
attr_accessor :bulk_index @@ -39,6 +42,7 @@ class ObservabilityPipelineOpenSearchDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'bulk_index' => :'bulk_index', :'id' => :'id', :'inputs' => :'inputs', @@ -50,6 +54,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'bulk_index' => :'String', :'id' => :'String', :'inputs' => :'Array', @@ -75,6 +80,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'bulk_index') self.bulk_index = attributes[:'bulk_index'] end @@ -160,6 +169,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && bulk_index == o.bulk_index && id == o.id && inputs == o.inputs && @@ -171,7 +181,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [bulk_index, id, inputs, type, additional_properties].hash + [buffer, bulk_index, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb index 4c15ba87ef60..fed60c0335e7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineRsyslogDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -42,6 +45,7 @@ class ObservabilityPipelineRsyslogDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'keepalive' => :'keepalive', @@ -54,6 +58,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'keepalive' => :'Integer', @@ -80,6 +85,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -180,6 +189,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && keepalive == o.keepalive && @@ -192,7 +202,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, keepalive, tls, type, additional_properties].hash + [buffer, id, inputs, keepalive, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb index 9d0af0150f23..ac4c5bf690f2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSentinelOneDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The unique identifier for this component. 
attr_reader :id @@ -39,6 +42,7 @@ class ObservabilityPipelineSentinelOneDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'region' => :'region', @@ -50,6 +54,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'region' => :'ObservabilityPipelineSentinelOneDestinationRegion', @@ -75,6 +80,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -171,6 +180,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && region == o.region && @@ -182,7 +192,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, region, type, additional_properties].hash + [buffer, id, inputs, region, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb index bbe025820793..35f3195b35ff 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSocketDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Encoding format for log events. 
attr_reader :encoding @@ -48,6 +51,7 @@ class ObservabilityPipelineSocketDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'encoding' => :'encoding', :'framing' => :'framing', :'id' => :'id', @@ -62,6 +66,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'encoding' => :'ObservabilityPipelineSocketDestinationEncoding', :'framing' => :'ObservabilityPipelineSocketDestinationFraming', :'id' => :'String', @@ -90,6 +95,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'encoding') self.encoding = attributes[:'encoding'] end @@ -220,6 +229,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && encoding == o.encoding && framing == o.framing && id == o.id && @@ -234,7 +244,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [encoding, framing, id, inputs, mode, tls, type, additional_properties].hash + [buffer, encoding, framing, id, inputs, mode, tls, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb index dd77cfec9255..e3f5403eade0 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb @@ -25,6 +25,9 @@ class ObservabilityPipelineSplunkHecDestination # If `false`, Splunk assigns the time the event was received. attr_accessor :auto_extract_timestamp + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # Encoding format for log events. 
attr_accessor :encoding @@ -50,6 +53,7 @@ class ObservabilityPipelineSplunkHecDestination def self.attribute_map { :'auto_extract_timestamp' => :'auto_extract_timestamp', + :'buffer' => :'buffer', :'encoding' => :'encoding', :'id' => :'id', :'index' => :'index', @@ -64,6 +68,7 @@ def self.attribute_map def self.openapi_types { :'auto_extract_timestamp' => :'Boolean', + :'buffer' => :'ObservabilityPipelineBufferOptions', :'encoding' => :'ObservabilityPipelineSplunkHecDestinationEncoding', :'id' => :'String', :'index' => :'String', @@ -95,6 +100,10 @@ def initialize(attributes = {}) self.auto_extract_timestamp = attributes[:'auto_extract_timestamp'] end + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'encoding') self.encoding = attributes[:'encoding'] end @@ -189,6 +198,7 @@ def ==(o) return true if self.equal?(o) self.class == o.class && auto_extract_timestamp == o.auto_extract_timestamp && + buffer == o.buffer && encoding == o.encoding && id == o.id && index == o.index && @@ -202,7 +212,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [auto_extract_timestamp, encoding, id, index, inputs, sourcetype, type, additional_properties].hash + [auto_extract_timestamp, buffer, encoding, id, index, inputs, sourcetype, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb index 6193fb57f020..ebaf3cf1b271 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSumoLogicDestination include BaseGenericModel + # Configuration for buffer settings on destination components. + attr_accessor :buffer + # The output encoding format. 
attr_accessor :encoding @@ -51,6 +54,7 @@ class ObservabilityPipelineSumoLogicDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'encoding' => :'encoding', :'header_custom_fields' => :'header_custom_fields', :'header_host_name' => :'header_host_name', @@ -66,6 +70,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'encoding' => :'ObservabilityPipelineSumoLogicDestinationEncoding', :'header_custom_fields' => :'Array', :'header_host_name' => :'String', @@ -95,6 +100,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'encoding') self.encoding = attributes[:'encoding'] end @@ -198,6 +207,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && encoding == o.encoding && header_custom_fields == o.header_custom_fields && header_host_name == o.header_host_name && @@ -213,7 +223,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [encoding, header_custom_fields, header_host_name, header_source_category, header_source_name, id, inputs, type, additional_properties].hash + [buffer, encoding, header_custom_fields, header_host_name, header_source_category, header_source_name, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb index 045dcfe90403..b2242d7eecdd 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSyslogNgDestination include BaseGenericModel + # Configuration for buffer settings on destination components. 
+ attr_accessor :buffer + # The unique identifier for this component. attr_reader :id @@ -42,6 +45,7 @@ class ObservabilityPipelineSyslogNgDestination # @!visibility private def self.attribute_map { + :'buffer' => :'buffer', :'id' => :'id', :'inputs' => :'inputs', :'keepalive' => :'keepalive', @@ -54,6 +58,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'buffer' => :'ObservabilityPipelineBufferOptions', :'id' => :'String', :'inputs' => :'Array', :'keepalive' => :'Integer', @@ -80,6 +85,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'buffer') + self.buffer = attributes[:'buffer'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -180,6 +189,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + buffer == o.buffer && id == o.id && inputs == o.inputs && keepalive == o.keepalive && @@ -192,7 +202,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, inputs, keepalive, tls, type, additional_properties].hash + [buffer, id, inputs, keepalive, tls, type, additional_properties].hash end end end