diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b74841f..dbe6ebb4cdce 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -35178,6 +35178,43 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: The `add_hostname` processor adds the hostname to log events. + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. properties: @@ -35455,6 +35492,37 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: The `cloud_prem` destination sends logs to Datadog CloudPrem. + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35473,7 +35541,7 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array - processors: + processor_groups: description: A list of processor groups that transform or enrich log data. example: - enabled: true @@ -35510,6 +35578,7 @@ components: description: A destination for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' @@ -35528,6 +35597,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35600,24 +35670,27 @@ components: ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
oneOf: @@ -35637,6 +35710,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. @@ -36030,6 +36104,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36063,6 +36139,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36203,7 +36296,8 @@ components: type: object ObservabilityPipelineEnrichmentTableProcessor: description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36224,6 +36318,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36246,6 +36342,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: @@ -36826,6 +36944,151 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: The `kafka` destination sends logs to Apache Kafka topics. 
+ properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. 
+ enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: description: The `kafka` source ingests data from Apache Kafka topics. properties: @@ -36843,10 +37106,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -36866,30 +37129,6 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37116,6 +37355,45 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: description: The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. 
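With the SASL and `librdkafka` option schemas renamed so the Kafka source and the new Kafka destination share them, constructing the destination from the Ruby client might look like the sketch below. All values are hypothetical; the model and enum constant names follow the schemas and `x-enum-varnames` added in this patch, in the constructor style of the existing `examples/v2/observability-pipelines/CreatePipeline.rb`.

```ruby
require "datadog_api_client"

# Hedged sketch, not a generated example: field names mirror the
# ObservabilityPipelineKafkaDestination schema above; the topic, IDs,
# and librdkafka option values are placeholders.
kafka_destination = DatadogAPIClient::V2::ObservabilityPipelineKafkaDestination.new({
  id: "kafka-destination",
  inputs: ["filter-processor"],
  topic: "logs-topic",
  encoding: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationEncoding::JSON,
  compression: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationCompression::GZIP,
  sasl: DatadogAPIClient::V2::ObservabilityPipelineKafkaSasl.new({
    mechanism: DatadogAPIClient::V2::ObservabilityPipelineKafkaSaslMechanism::PLAIN,
  }),
  librdkafka_options: [
    # The shared option model replaces the source-only
    # ObservabilityPipelineKafkaSourceLibrdkafkaOption removed below.
    DatadogAPIClient::V2::ObservabilityPipelineKafkaLibrdkafkaOption.new({
      name: "queue.buffering.max.ms",
      value: "5",
    }),
  ],
  type: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationType::KAFKA,
})
```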
@@ -37279,17 +37557,72 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: description: The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor @@ -37298,9 +37631,10 @@ components: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean enabled: @@ -37344,6 +37678,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37381,7 +37717,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. 
Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37711,6 +38048,16 @@ components: description: Whether this processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -37727,18 +38074,13 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object ObservabilityPipelineSampleProcessorType: @@ -37907,6 +38249,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: "Custom regex for internal API\u202Fkeys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -37962,6 +38309,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38442,6 +38794,68 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: The `split_array` processor splits array fields into separate events + based on configured rules. + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. 
+ example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: description: The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). @@ -75879,6 +76293,103 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. + operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' 
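Client-side, only the resolved path changes for these relocated endpoints; the same Preview gate still applies. A minimal sketch (pagination values hypothetical) following the repository's example conventions:

```ruby
require "datadog_api_client"

# The Observability Pipelines operations remain in Preview, so they must be
# enabled explicitly before use, as in the repository's example files.
DatadogAPIClient.configure do |config|
  config.unstable_operations["v2.list_pipelines".to_sym] = true
end

api_instance = DatadogAPIClient::V2::ObservabilityPipelinesAPI.new
# This call now targets GET /api/v2/obs-pipelines/pipelines rather than the
# retired /api/v2/remote_config/products/obs_pipelines/pipelines path.
p api_instance.list_pipelines({ page_size: 10, page_number: 0 })
```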
/api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -79313,103 +79824,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. - operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' - operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: description: Delete a pipeline. 
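Taken together with the schema changes above, a pipeline config now nests processors inside the renamed `processor_groups` field rather than a top-level `processors` array, and the new `add_hostname` processor slots into a group like any other. A hedged sketch with hypothetical IDs and queries, mirroring the updated Ruby examples that follow (source and destination model names assume the established `ObservabilityPipelineDatadogAgentSource`/`ObservabilityPipelineDatadogLogsDestination` pattern):

```ruby
require "datadog_api_client"

# Sketch only: the renamed `processor_groups` field wrapping the new
# `add_hostname` processor. IDs and search queries are placeholders.
config = DatadogAPIClient::V2::ObservabilityPipelineConfig.new({
  sources: [
    DatadogAPIClient::V2::ObservabilityPipelineDatadogAgentSource.new({
      id: "datadog-agent-source",
      type: DatadogAPIClient::V2::ObservabilityPipelineDatadogAgentSourceType::DATADOG_AGENT,
    }),
  ],
  processor_groups: [
    DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({
      enabled: true,
      id: "my-processor-group",
      include: "service:my-service",
      inputs: ["datadog-agent-source"],
      processors: [
        # add_hostname requires id, type, include, and enabled per the schema.
        DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessor.new({
          enabled: true,
          id: "add-hostname-processor",
          include: "service:my-service",
          type: DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessorType::ADD_HOSTNAME,
        }),
      ],
    }),
  ],
  destinations: [
    DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestination.new({
      id: "datadog-logs-destination",
      inputs: ["my-processor-group"],
      type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS,
    }),
  ],
})
```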
diff --git a/examples/v2/observability-pipelines/CreatePipeline.rb b/examples/v2/observability-pipelines/CreatePipeline.rb index 26427950224e..fd052c1ac10f 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.rb +++ b/examples/v2/observability-pipelines/CreatePipeline.rb @@ -19,7 +19,7 @@ type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS, }), ], - processors: [ + processor_groups: [ DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({ enabled: true, id: "my-processor-group", diff --git a/examples/v2/observability-pipelines/UpdatePipeline.rb b/examples/v2/observability-pipelines/UpdatePipeline.rb index dc510dc3dde1..d4591d88914b 100644 --- a/examples/v2/observability-pipelines/UpdatePipeline.rb +++ b/examples/v2/observability-pipelines/UpdatePipeline.rb @@ -22,7 +22,7 @@ type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS, }), ], - processors: [ + processor_groups: [ DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({ enabled: true, id: "my-processor-group", diff --git a/examples/v2/observability-pipelines/ValidatePipeline.rb b/examples/v2/observability-pipelines/ValidatePipeline.rb index ab3f267c04a2..01ff8243ed1b 100644 --- a/examples/v2/observability-pipelines/ValidatePipeline.rb +++ b/examples/v2/observability-pipelines/ValidatePipeline.rb @@ -19,7 +19,7 @@ type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS, }), ], - processors: [ + processor_groups: [ DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({ enabled: true, id: "my-processor-group", diff --git a/features/scenarios_model_mapping.rb b/features/scenarios_model_mapping.rb index 5980fcaf9919..573635c3b23e 100644 --- a/features/scenarios_model_mapping.rb +++ b/features/scenarios_model_mapping.rb @@ -2624,6 +2624,26 @@ "tags" => "String", "limit" => "Integer", }, + "v2.ListPipelines" => { + "page_size" => "Integer", + "page_number" => "Integer", + }, + "v2.CreatePipeline" => { + "body" => "ObservabilityPipelineSpec", + }, + "v2.ValidatePipeline" => { + "body" => "ObservabilityPipelineSpec", + }, + "v2.DeletePipeline" => { + "pipeline_id" => "String", + }, + "v2.GetPipeline" => { + "pipeline_id" => "String", + }, + "v2.UpdatePipeline" => { + "pipeline_id" => "String", + "body" => "ObservabilityPipeline", + }, "v2.CreateOnCallEscalationPolicy" => { "include" => "String", "body" => "EscalationPolicyCreateRequest", @@ -2965,26 +2985,6 @@ "agent_rule_id" => "String", "body" => "CloudWorkloadSecurityAgentRuleUpdateRequest", }, - "v2.ListPipelines" => { - "page_size" => "Integer", - "page_number" => "Integer", - }, - "v2.CreatePipeline" => { - "body" => "ObservabilityPipelineSpec", - }, - "v2.ValidatePipeline" => { - "body" => "ObservabilityPipelineSpec", - }, - "v2.DeletePipeline" => { - "pipeline_id" => "String", - }, - "v2.GetPipeline" => { - "pipeline_id" => "String", - }, - "v2.UpdatePipeline" => { - "pipeline_id" => "String", - "body" => "ObservabilityPipeline", - }, "v2.DeleteRestrictionPolicy" => { "resource_id" => "String", }, diff --git a/features/v2/given.json b/features/v2/given.json index aa08181879c7..3a01a060c5bd 100644 --- a/features/v2/given.json +++ b/features/v2/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n 
\"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processor_groups\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/features/v2/observability_pipelines.feature b/features/v2/observability_pipelines.feature index c43fa8b3b767..afe602c3ba97 100644 --- a/features/v2/observability_pipelines.feature +++ b/features/v2/observability_pipelines.feature @@ -12,7 +12,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Bad Request" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -20,7 +20,7 @@ Feature: 
Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -28,7 +28,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "OK" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 201 OK And the response "data" has field "id" @@ -106,7 +106,7 @@ Feature: Observability Pipelines And new "UpdatePipeline" request And there is a valid "pipeline" in the system And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, 
"name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -124,7 +124,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, 
"id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 404 Not Found @@ -134,7 +134,7 @@ Feature: Observability Pipelines And there is a valid "pipeline" in the system And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "data" has field "id" @@ -149,7 +149,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "Bad Request" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request And the response "errors[0].title" is equal to "Field 'include' is required" @@ -161,7 +161,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "OK" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": 
"datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "errors" has length 0 diff --git a/features/v2/undo.json b/features/v2/undo.json index eb43f26dbec6..25c59473dad7 100644 --- a/features/v2/undo.json +++ b/features/v2/undo.json @@ -2838,6 +2838,31 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3443,31 +3468,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": { diff --git a/lib/datadog_api_client/configuration.rb b/lib/datadog_api_client/configuration.rb index 95ddf1221cda..05283f260c46 100644 --- a/lib/datadog_api_client/configuration.rb +++ b/lib/datadog_api_client/configuration.rb @@ -301,6 +301,12 @@ def initialize "v2.update_monitor_user_template": false, "v2.validate_existing_monitor_user_template": false, "v2.validate_monitor_user_template": false, + "v2.create_pipeline": false, + "v2.delete_pipeline": false, + "v2.get_pipeline": false, + "v2.list_pipelines": false, + "v2.update_pipeline": false, + "v2.validate_pipeline": false, "v2.list_role_templates": false, "v2.create_connection": false, "v2.delete_connection": false, @@ -312,12 +318,6 @@ def initialize "v2.query_event_filtered_users": false, "v2.query_users": false, "v2.update_connection": false, - "v2.create_pipeline": false, - "v2.delete_pipeline": false, - "v2.get_pipeline": false, - "v2.list_pipelines": false, - "v2.update_pipeline": false, - "v2.validate_pipeline": false, "v2.create_scorecard_outcomes_batch": false, "v2.create_scorecard_rule": false, "v2.delete_scorecard_rule": false, diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 022391df7878..9872c80c4b9c 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -3139,6 
+3139,8 @@ def overrides "v2.observability_pipeline_add_env_vars_processor_variable" => "ObservabilityPipelineAddEnvVarsProcessorVariable", "v2.observability_pipeline_add_fields_processor" => "ObservabilityPipelineAddFieldsProcessor", "v2.observability_pipeline_add_fields_processor_type" => "ObservabilityPipelineAddFieldsProcessorType", + "v2.observability_pipeline_add_hostname_processor" => "ObservabilityPipelineAddHostnameProcessor", + "v2.observability_pipeline_add_hostname_processor_type" => "ObservabilityPipelineAddHostnameProcessorType", "v2.observability_pipeline_amazon_data_firehose_source" => "ObservabilityPipelineAmazonDataFirehoseSource", "v2.observability_pipeline_amazon_data_firehose_source_type" => "ObservabilityPipelineAmazonDataFirehoseSourceType", "v2.observability_pipeline_amazon_open_search_destination" => "ObservabilityPipelineAmazonOpenSearchDestination", @@ -3153,6 +3155,8 @@ def overrides "v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination", "v2.observability_pipeline_amazon_security_lake_destination_type" => "ObservabilityPipelineAmazonSecurityLakeDestinationType", "v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth", + "v2.observability_pipeline_cloud_prem_destination" => "ObservabilityPipelineCloudPremDestination", + "v2.observability_pipeline_cloud_prem_destination_type" => "ObservabilityPipelineCloudPremDestinationType", "v2.observability_pipeline_config" => "ObservabilityPipelineConfig", "v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem", "v2.observability_pipeline_config_processor_group" => "ObservabilityPipelineConfigProcessorGroup", @@ -3182,6 +3186,7 @@ def overrides "v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType", "v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination", "v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion", + "v2.observability_pipeline_elasticsearch_destination_data_stream" => "ObservabilityPipelineElasticsearchDestinationDataStream", "v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType", "v2.observability_pipeline_enrichment_table_file" => "ObservabilityPipelineEnrichmentTableFile", "v2.observability_pipeline_enrichment_table_file_encoding" => "ObservabilityPipelineEnrichmentTableFileEncoding", @@ -3193,6 +3198,7 @@ def overrides "v2.observability_pipeline_enrichment_table_geo_ip" => "ObservabilityPipelineEnrichmentTableGeoIp", "v2.observability_pipeline_enrichment_table_processor" => "ObservabilityPipelineEnrichmentTableProcessor", "v2.observability_pipeline_enrichment_table_processor_type" => "ObservabilityPipelineEnrichmentTableProcessorType", + "v2.observability_pipeline_enrichment_table_reference_table" => "ObservabilityPipelineEnrichmentTableReferenceTable", "v2.observability_pipeline_field_value" => "ObservabilityPipelineFieldValue", "v2.observability_pipeline_filter_processor" => "ObservabilityPipelineFilterProcessor", "v2.observability_pipeline_filter_processor_type" => "ObservabilityPipelineFilterProcessorType", @@ -3227,9 +3233,14 @@ def overrides "v2.observability_pipeline_http_server_source" => "ObservabilityPipelineHttpServerSource", "v2.observability_pipeline_http_server_source_auth_strategy" => "ObservabilityPipelineHttpServerSourceAuthStrategy", 
"v2.observability_pipeline_http_server_source_type" => "ObservabilityPipelineHttpServerSourceType", + "v2.observability_pipeline_kafka_destination" => "ObservabilityPipelineKafkaDestination", + "v2.observability_pipeline_kafka_destination_compression" => "ObservabilityPipelineKafkaDestinationCompression", + "v2.observability_pipeline_kafka_destination_encoding" => "ObservabilityPipelineKafkaDestinationEncoding", + "v2.observability_pipeline_kafka_destination_type" => "ObservabilityPipelineKafkaDestinationType", + "v2.observability_pipeline_kafka_librdkafka_option" => "ObservabilityPipelineKafkaLibrdkafkaOption", + "v2.observability_pipeline_kafka_sasl" => "ObservabilityPipelineKafkaSasl", + "v2.observability_pipeline_kafka_sasl_mechanism" => "ObservabilityPipelineKafkaSaslMechanism", "v2.observability_pipeline_kafka_source" => "ObservabilityPipelineKafkaSource", - "v2.observability_pipeline_kafka_source_librdkafka_option" => "ObservabilityPipelineKafkaSourceLibrdkafkaOption", - "v2.observability_pipeline_kafka_source_sasl" => "ObservabilityPipelineKafkaSourceSasl", "v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType", "v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource", "v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType", @@ -3245,6 +3256,8 @@ def overrides "v2.observability_pipeline_ocsf_mapping_library" => "ObservabilityPipelineOcsfMappingLibrary", "v2.observability_pipeline_open_search_destination" => "ObservabilityPipelineOpenSearchDestination", "v2.observability_pipeline_open_search_destination_type" => "ObservabilityPipelineOpenSearchDestinationType", + "v2.observability_pipeline_opentelemetry_source" => "ObservabilityPipelineOpentelemetrySource", + "v2.observability_pipeline_opentelemetry_source_type" => "ObservabilityPipelineOpentelemetrySourceType", "v2.observability_pipeline_parse_grok_processor" => "ObservabilityPipelineParseGrokProcessor", "v2.observability_pipeline_parse_grok_processor_rule" => "ObservabilityPipelineParseGrokProcessorRule", "v2.observability_pipeline_parse_grok_processor_rule_match_rule" => "ObservabilityPipelineParseGrokProcessorRuleMatchRule", @@ -3252,7 +3265,8 @@ def overrides "v2.observability_pipeline_parse_grok_processor_type" => "ObservabilityPipelineParseGrokProcessorType", "v2.observability_pipeline_parse_json_processor" => "ObservabilityPipelineParseJSONProcessor", "v2.observability_pipeline_parse_json_processor_type" => "ObservabilityPipelineParseJSONProcessorType", - "v2.observability_pipeline_pipeline_kafka_source_sasl_mechanism" => "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", + "v2.observability_pipeline_parse_xml_processor" => "ObservabilityPipelineParseXMLProcessor", + "v2.observability_pipeline_parse_xml_processor_type" => "ObservabilityPipelineParseXMLProcessorType", "v2.observability_pipeline_quota_processor" => "ObservabilityPipelineQuotaProcessor", "v2.observability_pipeline_quota_processor_limit" => "ObservabilityPipelineQuotaProcessorLimit", "v2.observability_pipeline_quota_processor_limit_enforce_type" => "ObservabilityPipelineQuotaProcessorLimitEnforceType", @@ -3333,6 +3347,9 @@ def overrides "v2.observability_pipeline_socket_source_type" => "ObservabilityPipelineSocketSourceType", "v2.observability_pipeline_spec" => "ObservabilityPipelineSpec", "v2.observability_pipeline_spec_data" => "ObservabilityPipelineSpecData", + "v2.observability_pipeline_split_array_processor" => 
"ObservabilityPipelineSplitArrayProcessor", + "v2.observability_pipeline_split_array_processor_array_config" => "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "v2.observability_pipeline_split_array_processor_type" => "ObservabilityPipelineSplitArrayProcessorType", "v2.observability_pipeline_splunk_hec_destination" => "ObservabilityPipelineSplunkHecDestination", "v2.observability_pipeline_splunk_hec_destination_encoding" => "ObservabilityPipelineSplunkHecDestinationEncoding", "v2.observability_pipeline_splunk_hec_destination_type" => "ObservabilityPipelineSplunkHecDestinationType", diff --git a/lib/datadog_api_client/v2/api/observability_pipelines_api.rb b/lib/datadog_api_client/v2/api/observability_pipelines_api.rb index 2bc926176292..dc1c17d03832 100644 --- a/lib/datadog_api_client/v2/api/observability_pipelines_api.rb +++ b/lib/datadog_api_client/v2/api/observability_pipelines_api.rb @@ -54,7 +54,7 @@ def create_pipeline_with_http_info(body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.create_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines' + local_var_path = '/api/v2/obs-pipelines/pipelines' # query parameters query_params = opts[:query_params] || {} @@ -266,7 +266,7 @@ def list_pipelines_with_http_info(opts = {}) @api_client.config.logger.debug 'Calling API: ObservabilityPipelinesAPI.list_pipelines ...' end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines' + local_var_path = '/api/v2/obs-pipelines/pipelines' # query parameters query_params = opts[:query_params] || {} @@ -418,7 +418,7 @@ def validate_pipeline_with_http_info(body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.validate_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines/validate' + local_var_path = '/api/v2/obs-pipelines/pipelines/validate' # query parameters query_params = opts[:query_params] || {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb new file mode 100644 index 000000000000..79b62c26b114 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb @@ -0,0 +1,196 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `add_hostname` processor adds the hostname to log events. + class ObservabilityPipelineAddHostnameProcessor + include BaseGenericModel + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. 
+ attr_reader :include + + # The processor type. The value should always be `add_hostname`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'type' => :'ObservabilityPipelineAddHostnameProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
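+ # Emit an explicitly-set nil only for attributes declared nullable; skip all other nils.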
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [display_name, enabled, id, include, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb new file mode 100644 index 000000000000..5eca2a92ffb4 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `add_hostname`. + class ObservabilityPipelineAddHostnameProcessorType + include BaseEnumModel + + ADD_HOSTNAME = "add_hostname".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb new file mode 100644 index 000000000000..05a04dc34b81 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb @@ -0,0 +1,167 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `cloud_prem` destination sends logs to Datadog CloudPrem. + class ObservabilityPipelineCloudPremDestination + include BaseGenericModel + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The destination type. The value should always be `cloud_prem`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
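+ #
+ # A minimal construction sketch (the input ID "filter-processor" is an assumed upstream component):
+ #   DatadogAPIClient::V2::ObservabilityPipelineCloudPremDestination.new(
+ #     id: "cloud-prem-destination",
+ #     inputs: ["filter-processor"],
+ #     type: DatadogAPIClient::V2::ObservabilityPipelineCloudPremDestinationType::CLOUD_PREM
+ #   )
+ #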
+ # @!visibility private
+ def self.attribute_map
+ {
+ :'id' => :'id',
+ :'inputs' => :'inputs',
+ :'type' => :'type'
+ }
+ end
+
+ # Attribute type mapping.
+ # @!visibility private
+ def self.openapi_types
+ {
+ :'id' => :'String',
+ :'inputs' => :'Array<String>',
+ :'type' => :'ObservabilityPipelineCloudPremDestinationType'
+ }
+ end
+
+ # Initializes the object
+ # @param attributes [Hash] Model attributes in the form of hash
+ # @!visibility private
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineCloudPremDestination` initialize method"
+ end
+
+ self.additional_properties = {}
+ # check to see if the attribute exists and convert string to symbol for hash key
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!self.class.attribute_map.key?(k.to_sym))
+ self.additional_properties[k.to_sym] = v
+ else
+ h[k.to_sym] = v
+ end
+ }
+
+ if attributes.key?(:'id')
+ self.id = attributes[:'id']
+ end
+
+ if attributes.key?(:'inputs')
+ if (value = attributes[:'inputs']).is_a?(Array)
+ self.inputs = value
+ end
+ end
+
+ if attributes.key?(:'type')
+ self.type = attributes[:'type']
+ end
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ # @!visibility private
+ def valid?
+ return false if @id.nil?
+ return false if @inputs.nil?
+ return false if @type.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param id [Object] Object to be assigned
+ # @!visibility private
+ def id=(id)
+ if id.nil?
+ fail ArgumentError, 'invalid value for "id", id cannot be nil.'
+ end
+ @id = id
+ end
+
+ # Custom attribute writer method with validation
+ # @param inputs [Object] Object to be assigned
+ # @!visibility private
+ def inputs=(inputs)
+ if inputs.nil?
+ fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.'
+ end
+ @inputs = inputs
+ end
+
+ # Custom attribute writer method with validation
+ # @param type [Object] Object to be assigned
+ # @!visibility private
+ def type=(type)
+ if type.nil?
+ fail ArgumentError, 'invalid value for "type", type cannot be nil.'
+ end
+ @type = type
+ end
+
+ # Returns the object in the form of hash, with additionalProperties support.
+ # @return [Hash] Returns the object in the form of hash
+ # @!visibility private
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ self.additional_properties.each_pair do |attr, value|
+ hash[attr] = value
+ end
+ hash
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param o [Object] Object to be compared
+ # @!visibility private
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ id == o.id &&
+ inputs == o.inputs &&
+ type == o.type &&
+ additional_properties == o.additional_properties
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code + # @!visibility private + def hash + [id, inputs, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb new file mode 100644 index 000000000000..8f235942f08e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `cloud_prem`. + class ObservabilityPipelineCloudPremDestinationType + include BaseEnumModel + + CLOUD_PREM = "cloud_prem".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb index 04005274e028..cd7634d69f7a 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb @@ -25,7 +25,7 @@ class ObservabilityPipelineConfig attr_reader :destinations # A list of processor groups that transform or enrich log data. - attr_accessor :processors + attr_accessor :processor_groups # A list of configured data sources for the pipeline. 
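#
# A sketch of assembling a config with the renamed field, assuming `source`, `group`, and
# `destination` were built from the source, processor-group, and destination models:
#   DatadogAPIClient::V2::ObservabilityPipelineConfig.new(
#     sources: [source], processor_groups: [group], destinations: [destination]
#   )
#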
attr_reader :sources
@@ -37,7 +37,7 @@ class ObservabilityPipelineConfig
def self.attribute_map
{
:'destinations' => :'destinations',
- :'processors' => :'processors',
+ :'processor_groups' => :'processor_groups',
:'sources' => :'sources'
}
end
@@ -47,7 +47,7 @@ def self.attribute_map
def self.openapi_types
{
:'destinations' => :'Array<ObservabilityPipelineConfigDestinationItem>',
- :'processors' => :'Array<ObservabilityPipelineConfigProcessorGroup>',
+ :'processor_groups' => :'Array<ObservabilityPipelineConfigProcessorGroup>',
:'sources' => :'Array<ObservabilityPipelineConfigSourceItem>'
}
end
@@ -76,9 +76,9 @@ def initialize(attributes = {})
end
end

- if attributes.key?(:'processors')
- if (value = attributes[:'processors']).is_a?(Array)
- self.processors = value
+ if attributes.key?(:'processor_groups')
+ if (value = attributes[:'processor_groups']).is_a?(Array)
+ self.processor_groups = value
end
end

@@ -145,7 +145,7 @@ def ==(o)
return true if self.equal?(o)
self.class == o.class &&
destinations == o.destinations &&
- processors == o.processors &&
+ processor_groups == o.processor_groups &&
sources == o.sources &&
additional_properties == o.additional_properties
end
@@ -154,7 +154,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
- [destinations, processors, sources, additional_properties].hash
+ [destinations, processor_groups, sources, additional_properties].hash
end
end
end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb
index 00386a1aa190..4b7114df2f5a 100644
--- a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb
@@ -27,6 +27,7 @@ class << self
def openapi_one_of
[
:'ObservabilityPipelineDatadogLogsDestination',
+ :'ObservabilityPipelineCloudPremDestination',
:'ObservabilityPipelineAmazonS3Destination',
:'ObservabilityPipelineGoogleCloudStorageDestination',
:'ObservabilityPipelineSplunkHecDestination',
@@ -44,7 +45,8 @@ def openapi_one_of
:'ObservabilityPipelineSocketDestination',
:'ObservabilityPipelineAmazonSecurityLakeDestination',
:'ObservabilityPipelineCrowdStrikeNextGenSiemDestination',
- :'ObservabilityPipelineGooglePubSubDestination'
+ :'ObservabilityPipelineGooglePubSubDestination',
+ :'ObservabilityPipelineKafkaDestination'
]
end
# Builds the object
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb
index 334f6e14d8a9..81425314ebeb 100644
--- a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb
@@ -26,24 +26,27 @@ class << self
# List of class defined in oneOf (OpenAPI v3)
def openapi_one_of
[
+ :'ObservabilityPipelineAddEnvVarsProcessor',
+ :'ObservabilityPipelineAddFieldsProcessor',
+ :'ObservabilityPipelineAddHostnameProcessor',
+ :'ObservabilityPipelineCustomProcessor',
+ :'ObservabilityPipelineDatadogTagsProcessor',
+ :'ObservabilityPipelineDedupeProcessor',
+ :'ObservabilityPipelineEnrichmentTableProcessor',
:'ObservabilityPipelineFilterProcessor',
+ :'ObservabilityPipelineGenerateMetricsProcessor',
+ :'ObservabilityPipelineOcsfMapperProcessor',
+ :'ObservabilityPipelineParseGrokProcessor',
:'ObservabilityPipelineParseJSONProcessor',
+ :'ObservabilityPipelineParseXMLProcessor',
:'ObservabilityPipelineQuotaProcessor',
- :'ObservabilityPipelineAddFieldsProcessor',
+ :'ObservabilityPipelineReduceProcessor',
:'ObservabilityPipelineRemoveFieldsProcessor', :'ObservabilityPipelineRenameFieldsProcessor', - :'ObservabilityPipelineGenerateMetricsProcessor', :'ObservabilityPipelineSampleProcessor', - :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineSensitiveDataScannerProcessor', - :'ObservabilityPipelineOcsfMapperProcessor', - :'ObservabilityPipelineAddEnvVarsProcessor', - :'ObservabilityPipelineDedupeProcessor', - :'ObservabilityPipelineEnrichmentTableProcessor', - :'ObservabilityPipelineReduceProcessor', - :'ObservabilityPipelineThrottleProcessor', - :'ObservabilityPipelineCustomProcessor', - :'ObservabilityPipelineDatadogTagsProcessor' + :'ObservabilityPipelineSplitArrayProcessor', + :'ObservabilityPipelineThrottleProcessor' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb index 6c5e58312de6..9309d0b3f914 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb @@ -41,7 +41,8 @@ def openapi_one_of :'ObservabilityPipelineGooglePubSubSource', :'ObservabilityPipelineHttpClientSource', :'ObservabilityPipelineLogstashSource', - :'ObservabilityPipelineSocketSource' + :'ObservabilityPipelineSocketSource', + :'ObservabilityPipelineOpentelemetrySource' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb index f49df443a047..b75b4685080c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb @@ -27,6 +27,9 @@ class ObservabilityPipelineElasticsearchDestination # The index to write logs to in Elasticsearch. attr_accessor :bulk_index + # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + attr_accessor :data_stream + # The unique identifier for this component. 
attr_reader :id
@@ -44,6 +47,7 @@ def self.attribute_map
{
:'api_version' => :'api_version',
:'bulk_index' => :'bulk_index',
+ :'data_stream' => :'data_stream',
:'id' => :'id',
:'inputs' => :'inputs',
:'type' => :'type'
@@ -56,6 +60,7 @@ def self.openapi_types
{
:'api_version' => :'ObservabilityPipelineElasticsearchDestinationApiVersion',
:'bulk_index' => :'String',
+ :'data_stream' => :'ObservabilityPipelineElasticsearchDestinationDataStream',
:'id' => :'String',
:'inputs' => :'Array<String>',
:'type' => :'ObservabilityPipelineElasticsearchDestinationType'
@@ -88,6 +93,10 @@ def initialize(attributes = {})
self.bulk_index = attributes[:'bulk_index']
end

+ if attributes.key?(:'data_stream')
+ self.data_stream = attributes[:'data_stream']
+ end
+
if attributes.key?(:'id')
self.id = attributes[:'id']
end
@@ -171,6 +180,7 @@ def ==(o)
self.class == o.class &&
api_version == o.api_version &&
bulk_index == o.bulk_index &&
+ data_stream == o.data_stream &&
id == o.id &&
inputs == o.inputs &&
type == o.type &&
@@ -181,7 +191,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
- [api_version, bulk_index, id, inputs, type, additional_properties].hash
+ [api_version, bulk_index, data_stream, id, inputs, type, additional_properties].hash
end
end
end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb
new file mode 100644
index 000000000000..8bf597f0b45d
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb
@@ -0,0 +1,125 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+
+The version of the OpenAPI document: 1.0
+Contact: support@datadoghq.com
+Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator
+
+ Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ This product includes software developed at Datadog (https://www.datadoghq.com/).
+ Copyright 2020-Present Datadog, Inc.
+
+=end
+
+require 'date'
+require 'time'
+
+module DatadogAPIClient::V2
+ # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index.
+ class ObservabilityPipelineElasticsearchDestinationDataStream
+ include BaseGenericModel
+
+ # The data stream dataset for your logs. This groups logs by their source or application.
+ attr_accessor :dataset
+
+ # The data stream type for your logs. This determines how logs are categorized within the data stream.
+ attr_accessor :dtype
+
+ # The data stream namespace for your logs. This separates logs into different environments or domains.
+ attr_accessor :namespace
+
+ attr_accessor :additional_properties
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ # @!visibility private
+ def self.attribute_map
+ {
+ :'dataset' => :'dataset',
+ :'dtype' => :'dtype',
+ :'namespace' => :'namespace'
+ }
+ end
+
+ # Attribute type mapping.
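+ #
+ # A sketch with illustrative values; Elasticsearch typically names the resulting
+ # data stream `<type>-<dataset>-<namespace>`:
+ #   DatadogAPIClient::V2::ObservabilityPipelineElasticsearchDestinationDataStream.new(
+ #     dtype: "logs", dataset: "nginx", namespace: "default"
+ #   )
+ #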
+ # @!visibility private + def self.openapi_types + { + :'dataset' => :'String', + :'dtype' => :'String', + :'namespace' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineElasticsearchDestinationDataStream` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'dataset') + self.dataset = attributes[:'dataset'] + end + + if attributes.key?(:'dtype') + self.dtype = attributes[:'dtype'] + end + + if attributes.key?(:'namespace') + self.namespace = attributes[:'namespace'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + dataset == o.dataset && + dtype == o.dtype && + namespace == o.namespace && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [dataset, dtype, namespace, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb index ea73dec4ede1..e716a63594e7 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb @@ -17,7 +17,7 @@ require 'time' module DatadogAPIClient::V2 - # The `enrichment_table` processor enriches logs using a static CSV file or GeoIP database. + # The `enrichment_table` processor enriches logs using a static CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, or `reference_table` must be configured. class ObservabilityPipelineEnrichmentTableProcessor include BaseGenericModel @@ -39,6 +39,9 @@ class ObservabilityPipelineEnrichmentTableProcessor # A Datadog search query used to determine which logs this processor targets. attr_reader :include + # Uses a Datadog reference table to enrich logs. + attr_accessor :reference_table + # Path where enrichment results should be stored in the log. 
attr_reader :target @@ -57,6 +60,7 @@ def self.attribute_map :'geoip' => :'geoip', :'id' => :'id', :'include' => :'include', + :'reference_table' => :'reference_table', :'target' => :'target', :'type' => :'type' } @@ -72,6 +76,7 @@ def self.openapi_types :'geoip' => :'ObservabilityPipelineEnrichmentTableGeoIp', :'id' => :'String', :'include' => :'String', + :'reference_table' => :'ObservabilityPipelineEnrichmentTableReferenceTable', :'target' => :'String', :'type' => :'ObservabilityPipelineEnrichmentTableProcessorType' } @@ -119,6 +124,10 @@ def initialize(attributes = {}) self.include = attributes[:'include'] end + if attributes.key?(:'reference_table') + self.reference_table = attributes[:'reference_table'] + end + if attributes.key?(:'target') self.target = attributes[:'target'] end @@ -222,6 +231,7 @@ def ==(o) geoip == o.geoip && id == o.id && include == o.include && + reference_table == o.reference_table && target == o.target && type == o.type && additional_properties == o.additional_properties @@ -231,7 +241,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, enabled, file, geoip, id, include, target, type, additional_properties].hash + [display_name, enabled, file, geoip, id, include, reference_table, target, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb new file mode 100644 index 000000000000..2248ad6775d1 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb @@ -0,0 +1,156 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Uses a Datadog reference table to enrich logs. + class ObservabilityPipelineEnrichmentTableReferenceTable + include BaseGenericModel + + # List of column names to include from the reference table. If not provided, all columns are included. + attr_accessor :columns + + # Path to the field in the log event to match against the reference table. + attr_reader :key_field + + # The unique identifier of the reference table. + attr_reader :table_id + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'columns' => :'columns', + :'key_field' => :'key_field', + :'table_id' => :'table_id' + } + end + + # Attribute type mapping. 
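+ #
+ # A sketch with placeholder values: match each log's `user.id` against a reference
+ # table and copy selected columns into the enrichment target:
+ #   DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableReferenceTable.new(
+ #     table_id: "my-reference-table-id", key_field: "user.id", columns: ["team", "region"]
+ #   )
+ #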
+ # @!visibility private
+ def self.openapi_types
+ {
+ :'columns' => :'Array<String>',
+ :'key_field' => :'String',
+ :'table_id' => :'String'
+ }
+ end
+
+ # Initializes the object
+ # @param attributes [Hash] Model attributes in the form of hash
+ # @!visibility private
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableReferenceTable` initialize method"
+ end
+
+ self.additional_properties = {}
+ # check to see if the attribute exists and convert string to symbol for hash key
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!self.class.attribute_map.key?(k.to_sym))
+ self.additional_properties[k.to_sym] = v
+ else
+ h[k.to_sym] = v
+ end
+ }
+
+ if attributes.key?(:'columns')
+ if (value = attributes[:'columns']).is_a?(Array)
+ self.columns = value
+ end
+ end
+
+ if attributes.key?(:'key_field')
+ self.key_field = attributes[:'key_field']
+ end
+
+ if attributes.key?(:'table_id')
+ self.table_id = attributes[:'table_id']
+ end
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ # @!visibility private
+ def valid?
+ return false if @key_field.nil?
+ return false if @table_id.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param key_field [Object] Object to be assigned
+ # @!visibility private
+ def key_field=(key_field)
+ if key_field.nil?
+ fail ArgumentError, 'invalid value for "key_field", key_field cannot be nil.'
+ end
+ @key_field = key_field
+ end
+
+ # Custom attribute writer method with validation
+ # @param table_id [Object] Object to be assigned
+ # @!visibility private
+ def table_id=(table_id)
+ if table_id.nil?
+ fail ArgumentError, 'invalid value for "table_id", table_id cannot be nil.'
+ end
+ @table_id = table_id
+ end
+
+ # Returns the object in the form of hash, with additionalProperties support.
+ # @return [Hash] Returns the object in the form of hash
+ # @!visibility private
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ self.additional_properties.each_pair do |attr, value|
+ hash[attr] = value
+ end
+ hash
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param o [Object] Object to be compared
+ # @!visibility private
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ columns == o.columns &&
+ key_field == o.key_field &&
+ table_id == o.table_id &&
+ additional_properties == o.additional_properties
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ # @!visibility private
+ def hash
+ [columns, key_field, table_id, additional_properties].hash
+ end
+ end
+end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb
new file mode 100644
index 000000000000..8e164742f96a
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb
@@ -0,0 +1,359 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `kafka` destination sends logs to Apache Kafka topics. + class ObservabilityPipelineKafkaDestination + include BaseGenericModel + + # Compression codec for Kafka messages. + attr_accessor :compression + + # Encoding format for log events. + attr_reader :encoding + + # The field name to use for Kafka message headers. + attr_accessor :headers_key + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The field name to use as the Kafka message key. + attr_accessor :key_field + + # Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + attr_accessor :librdkafka_options + + # Maximum time in milliseconds to wait for message delivery confirmation. + attr_reader :message_timeout_ms + + # Duration in seconds for the rate limit window. + attr_reader :rate_limit_duration_secs + + # Maximum number of messages allowed per rate limit duration. + attr_reader :rate_limit_num + + # Specifies the SASL mechanism for authenticating with a Kafka cluster. + attr_accessor :sasl + + # Socket timeout in milliseconds for network requests. + attr_reader :socket_timeout_ms + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The Kafka topic name to publish logs to. + attr_reader :topic + + # The destination type. The value should always be `kafka`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'compression' => :'compression', + :'encoding' => :'encoding', + :'headers_key' => :'headers_key', + :'id' => :'id', + :'inputs' => :'inputs', + :'key_field' => :'key_field', + :'librdkafka_options' => :'librdkafka_options', + :'message_timeout_ms' => :'message_timeout_ms', + :'rate_limit_duration_secs' => :'rate_limit_duration_secs', + :'rate_limit_num' => :'rate_limit_num', + :'sasl' => :'sasl', + :'socket_timeout_ms' => :'socket_timeout_ms', + :'tls' => :'tls', + :'topic' => :'topic', + :'type' => :'type' + } + end + + # Attribute type mapping. 
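+ #
+ # A minimal sketch (topic name and input ID are placeholders):
+ #   DatadogAPIClient::V2::ObservabilityPipelineKafkaDestination.new(
+ #     id: "kafka-destination", inputs: ["filter-processor"], topic: "app-logs",
+ #     encoding: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationEncoding::JSON,
+ #     type: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationType::KAFKA
+ #   )
+ #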
+ # @!visibility private
+ def self.openapi_types
+ {
+ :'compression' => :'ObservabilityPipelineKafkaDestinationCompression',
+ :'encoding' => :'ObservabilityPipelineKafkaDestinationEncoding',
+ :'headers_key' => :'String',
+ :'id' => :'String',
+ :'inputs' => :'Array<String>',
+ :'key_field' => :'String',
+ :'librdkafka_options' => :'Array<ObservabilityPipelineKafkaLibrdkafkaOption>',
+ :'message_timeout_ms' => :'Integer',
+ :'rate_limit_duration_secs' => :'Integer',
+ :'rate_limit_num' => :'Integer',
+ :'sasl' => :'ObservabilityPipelineKafkaSasl',
+ :'socket_timeout_ms' => :'Integer',
+ :'tls' => :'ObservabilityPipelineTls',
+ :'topic' => :'String',
+ :'type' => :'ObservabilityPipelineKafkaDestinationType'
+ }
+ end
+
+ # Initializes the object
+ # @param attributes [Hash] Model attributes in the form of hash
+ # @!visibility private
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaDestination` initialize method"
+ end
+
+ self.additional_properties = {}
+ # check to see if the attribute exists and convert string to symbol for hash key
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!self.class.attribute_map.key?(k.to_sym))
+ self.additional_properties[k.to_sym] = v
+ else
+ h[k.to_sym] = v
+ end
+ }
+
+ if attributes.key?(:'compression')
+ self.compression = attributes[:'compression']
+ end
+
+ if attributes.key?(:'encoding')
+ self.encoding = attributes[:'encoding']
+ end
+
+ if attributes.key?(:'headers_key')
+ self.headers_key = attributes[:'headers_key']
+ end
+
+ if attributes.key?(:'id')
+ self.id = attributes[:'id']
+ end
+
+ if attributes.key?(:'inputs')
+ if (value = attributes[:'inputs']).is_a?(Array)
+ self.inputs = value
+ end
+ end
+
+ if attributes.key?(:'key_field')
+ self.key_field = attributes[:'key_field']
+ end
+
+ if attributes.key?(:'librdkafka_options')
+ if (value = attributes[:'librdkafka_options']).is_a?(Array)
+ self.librdkafka_options = value
+ end
+ end
+
+ if attributes.key?(:'message_timeout_ms')
+ self.message_timeout_ms = attributes[:'message_timeout_ms']
+ end
+
+ if attributes.key?(:'rate_limit_duration_secs')
+ self.rate_limit_duration_secs = attributes[:'rate_limit_duration_secs']
+ end
+
+ if attributes.key?(:'rate_limit_num')
+ self.rate_limit_num = attributes[:'rate_limit_num']
+ end
+
+ if attributes.key?(:'sasl')
+ self.sasl = attributes[:'sasl']
+ end
+
+ if attributes.key?(:'socket_timeout_ms')
+ self.socket_timeout_ms = attributes[:'socket_timeout_ms']
+ end
+
+ if attributes.key?(:'tls')
+ self.tls = attributes[:'tls']
+ end
+
+ if attributes.key?(:'topic')
+ self.topic = attributes[:'topic']
+ end
+
+ if attributes.key?(:'type')
+ self.type = attributes[:'type']
+ end
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ # @!visibility private
+ def valid?
+ return false if @encoding.nil?
+ return false if @id.nil?
+ return false if @inputs.nil?
+ return false if !@message_timeout_ms.nil? && @message_timeout_ms < 1
+ return false if !@rate_limit_duration_secs.nil? && @rate_limit_duration_secs < 1
+ return false if !@rate_limit_num.nil? && @rate_limit_num < 1
+ return false if !@socket_timeout_ms.nil? && @socket_timeout_ms > 300000
+ return false if !@socket_timeout_ms.nil? && @socket_timeout_ms < 10
+ return false if @topic.nil?
+ return false if @type.nil?
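+ # All required fields are present and every bounded value is within its allowed range.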
+ true + end + + # Custom attribute writer method with validation + # @param encoding [Object] Object to be assigned + # @!visibility private + def encoding=(encoding) + if encoding.nil? + fail ArgumentError, 'invalid value for "encoding", encoding cannot be nil.' + end + @encoding = encoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param message_timeout_ms [Object] Object to be assigned + # @!visibility private + def message_timeout_ms=(message_timeout_ms) + if !message_timeout_ms.nil? && message_timeout_ms < 1 + fail ArgumentError, 'invalid value for "message_timeout_ms", must be greater than or equal to 1.' + end + @message_timeout_ms = message_timeout_ms + end + + # Custom attribute writer method with validation + # @param rate_limit_duration_secs [Object] Object to be assigned + # @!visibility private + def rate_limit_duration_secs=(rate_limit_duration_secs) + if !rate_limit_duration_secs.nil? && rate_limit_duration_secs < 1 + fail ArgumentError, 'invalid value for "rate_limit_duration_secs", must be greater than or equal to 1.' + end + @rate_limit_duration_secs = rate_limit_duration_secs + end + + # Custom attribute writer method with validation + # @param rate_limit_num [Object] Object to be assigned + # @!visibility private + def rate_limit_num=(rate_limit_num) + if !rate_limit_num.nil? && rate_limit_num < 1 + fail ArgumentError, 'invalid value for "rate_limit_num", must be greater than or equal to 1.' + end + @rate_limit_num = rate_limit_num + end + + # Custom attribute writer method with validation + # @param socket_timeout_ms [Object] Object to be assigned + # @!visibility private + def socket_timeout_ms=(socket_timeout_ms) + if !socket_timeout_ms.nil? && socket_timeout_ms > 300000 + fail ArgumentError, 'invalid value for "socket_timeout_ms", must be smaller than or equal to 300000.' + end + if !socket_timeout_ms.nil? && socket_timeout_ms < 10 + fail ArgumentError, 'invalid value for "socket_timeout_ms", must be greater than or equal to 10.' + end + @socket_timeout_ms = socket_timeout_ms + end + + # Custom attribute writer method with validation + # @param topic [Object] Object to be assigned + # @!visibility private + def topic=(topic) + if topic.nil? + fail ArgumentError, 'invalid value for "topic", topic cannot be nil.' + end + @topic = topic + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + compression == o.compression && + encoding == o.encoding && + headers_key == o.headers_key && + id == o.id && + inputs == o.inputs && + key_field == o.key_field && + librdkafka_options == o.librdkafka_options && + message_timeout_ms == o.message_timeout_ms && + rate_limit_duration_secs == o.rate_limit_duration_secs && + rate_limit_num == o.rate_limit_num && + sasl == o.sasl && + socket_timeout_ms == o.socket_timeout_ms && + tls == o.tls && + topic == o.topic && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [compression, encoding, headers_key, id, inputs, key_field, librdkafka_options, message_timeout_ms, rate_limit_duration_secs, rate_limit_num, sasl, socket_timeout_ms, tls, topic, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb new file mode 100644 index 000000000000..ccb074bfdb21 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb @@ -0,0 +1,30 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression codec for Kafka messages. + class ObservabilityPipelineKafkaDestinationCompression + include BaseEnumModel + + NONE = "none".freeze + GZIP = "gzip".freeze + SNAPPY = "snappy".freeze + LZ4 = "lz4".freeze + ZSTD = "zstd".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb new file mode 100644 index 000000000000..34209b47273c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Encoding format for log events. 
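+ # `json` serializes the whole event; `raw_message` typically emits only the log's message text.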
+ class ObservabilityPipelineKafkaDestinationEncoding + include BaseEnumModel + + JSON = "json".freeze + RAW_MESSAGE = "raw_message".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb new file mode 100644 index 000000000000..0b2f5afbda5f --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `kafka`. + class ObservabilityPipelineKafkaDestinationType + include BaseEnumModel + + KAFKA = "kafka".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb similarity index 95% rename from lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb index b911c8aaf19a..bf29a784d49c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb @@ -17,8 +17,8 @@ require 'time' module DatadogAPIClient::V2 - # Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka sources, such as timeouts, buffer sizes, and security settings. - class ObservabilityPipelineKafkaSourceLibrdkafkaOption + # Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka source and destination, such as timeouts, buffer sizes, and security settings. + class ObservabilityPipelineKafkaLibrdkafkaOption include BaseGenericModel # The name of the `librdkafka` configuration option to set. 
@@ -52,7 +52,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSourceLibrdkafkaOption` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaLibrdkafkaOption` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb similarity index 95% rename from lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb index 886923a1868f..88fb7915b9da 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb @@ -18,7 +18,7 @@ module DatadogAPIClient::V2 # Specifies the SASL mechanism for authenticating with a Kafka cluster. - class ObservabilityPipelineKafkaSourceSasl + class ObservabilityPipelineKafkaSasl include BaseGenericModel # SASL mechanism used for Kafka authentication. @@ -38,7 +38,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { - :'mechanism' => :'ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + :'mechanism' => :'ObservabilityPipelineKafkaSaslMechanism' } end @@ -47,7 +47,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSourceSasl` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSasl` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb similarity index 92% rename from lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb index 64187c5011f0..64cdb527b7ab 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb @@ -18,7 +18,7 @@ module DatadogAPIClient::V2 # SASL mechanism used for Kafka authentication. 
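# Used by both the Kafka source and the Kafka destination.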
- class ObservabilityPipelinePipelineKafkaSourceSaslMechanism
+ class ObservabilityPipelineKafkaSaslMechanism
include BaseEnumModel

PLAIN = "PLAIN".freeze
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb
index 33c4b1649b15..c81abd61932c 100644
--- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb
@@ -64,8 +64,8 @@ def self.openapi_types
{
:'group_id' => :'String',
:'id' => :'String',
- :'librdkafka_options' => :'Array<ObservabilityPipelineKafkaSourceLibrdkafkaOption>',
- :'sasl' => :'ObservabilityPipelineKafkaSourceSasl',
+ :'librdkafka_options' => :'Array<ObservabilityPipelineKafkaLibrdkafkaOption>',
+ :'sasl' => :'ObservabilityPipelineKafkaSasl',
:'tls' => :'ObservabilityPipelineTls',
:'topics' => :'Array<String>',
:'type' => :'ObservabilityPipelineKafkaSourceType'
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb
new file mode 100644
index 000000000000..5a5a488ebe1a
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb
@@ -0,0 +1,174 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+
+The version of the OpenAPI document: 1.0
+Contact: support@datadoghq.com
+Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator
+
+ Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ This product includes software developed at Datadog (https://www.datadoghq.com/).
+ Copyright 2020-Present Datadog, Inc.
+
+=end
+
+require 'date'
+require 'time'
+
+module DatadogAPIClient::V2
+ # The `opentelemetry` source receives telemetry data using the OpenTelemetry Protocol (OTLP) over gRPC and HTTP.
+ class ObservabilityPipelineOpentelemetrySource
+ include BaseGenericModel
+
+ # Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only).
+ attr_accessor :grpc_address_key
+
+ # Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only).
+ attr_accessor :http_address_key
+
+ # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
+ attr_reader :id
+
+ # Configuration for enabling TLS encryption between the pipeline component and external services.
+ attr_accessor :tls
+
+ # The source type. The value should always be `opentelemetry`.
+ attr_reader :type
+
+ attr_accessor :additional_properties
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ # @!visibility private
+ def self.attribute_map
+ {
+ :'grpc_address_key' => :'grpc_address_key',
+ :'http_address_key' => :'http_address_key',
+ :'id' => :'id',
+ :'tls' => :'tls',
+ :'type' => :'type'
+ }
+ end
+
+ # Attribute type mapping.
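+ #
+ # A sketch (the environment variable names are placeholders):
+ #   DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySource.new(
+ #     id: "opentelemetry-source",
+ #     grpc_address_key: "OTLP_GRPC_ADDRESS", http_address_key: "OTLP_HTTP_ADDRESS",
+ #     type: DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySourceType::OPENTELEMETRY
+ #   )
+ #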
+ # @!visibility private + def self.openapi_types + { + :'grpc_address_key' => :'String', + :'http_address_key' => :'String', + :'id' => :'String', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineOpentelemetrySourceType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySource` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'grpc_address_key') + self.grpc_address_key = attributes[:'grpc_address_key'] + end + + if attributes.key?(:'http_address_key') + self.http_address_key = attributes[:'http_address_key'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + grpc_address_key == o.grpc_address_key && + http_address_key == o.http_address_key && + id == o.id && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [grpc_address_key, http_address_key, id, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb new file mode 100644 index 000000000000..30df02a5c2ef --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The source type. The value should always be `opentelemetry`. + class ObservabilityPipelineOpentelemetrySourceType + include BaseEnumModel + + OPENTELEMETRY = "opentelemetry".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb new file mode 100644 index 000000000000..4ef12815814e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb @@ -0,0 +1,298 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `parse_xml` processor parses XML from a specified field and extracts it into the event. + class ObservabilityPipelineParseXMLProcessor + include BaseGenericModel + + # Whether to always use a text key for element content. + attr_accessor :always_use_text_key + + # The prefix to use for XML attributes in the parsed output. + attr_accessor :attr_prefix + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The name of the log field that contains an XML string. + attr_reader :field + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # Whether to include XML attributes in the parsed output. + attr_accessor :include_attr + + # Whether to parse boolean values from strings. + attr_accessor :parse_bool + + # Whether to parse null values. + attr_accessor :parse_null + + # Whether to parse numeric values from strings. + attr_accessor :parse_number + + # The key name to use for text content within XML elements. Must be at least 1 character if specified. + attr_reader :text_key + + # The processor type. 
The value should always be `parse_xml`.
+ attr_reader :type
+
+ attr_accessor :additional_properties
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ # @!visibility private
+ def self.attribute_map
+ {
+ :'always_use_text_key' => :'always_use_text_key',
+ :'attr_prefix' => :'attr_prefix',
+ :'display_name' => :'display_name',
+ :'enabled' => :'enabled',
+ :'field' => :'field',
+ :'id' => :'id',
+ :'include' => :'include',
+ :'include_attr' => :'include_attr',
+ :'parse_bool' => :'parse_bool',
+ :'parse_null' => :'parse_null',
+ :'parse_number' => :'parse_number',
+ :'text_key' => :'text_key',
+ :'type' => :'type'
+ }
+ end
+
+ # Attribute type mapping.
+ # @!visibility private
+ def self.openapi_types
+ {
+ :'always_use_text_key' => :'Boolean',
+ :'attr_prefix' => :'String',
+ :'display_name' => :'String',
+ :'enabled' => :'Boolean',
+ :'field' => :'String',
+ :'id' => :'String',
+ :'include' => :'String',
+ :'include_attr' => :'Boolean',
+ :'parse_bool' => :'Boolean',
+ :'parse_null' => :'Boolean',
+ :'parse_number' => :'Boolean',
+ :'text_key' => :'String',
+ :'type' => :'ObservabilityPipelineParseXMLProcessorType'
+ }
+ end
+
+ # Initializes the object
+ # @param attributes [Hash] Model attributes in the form of hash
+ # @!visibility private
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseXMLProcessor` initialize method"
+ end
+
+ self.additional_properties = {}
+ # check to see if the attribute exists and convert string to symbol for hash key
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!self.class.attribute_map.key?(k.to_sym))
+ self.additional_properties[k.to_sym] = v
+ else
+ h[k.to_sym] = v
+ end
+ }
+
+ if attributes.key?(:'always_use_text_key')
+ self.always_use_text_key = attributes[:'always_use_text_key']
+ end
+
+ if attributes.key?(:'attr_prefix')
+ self.attr_prefix = attributes[:'attr_prefix']
+ end
+
+ if attributes.key?(:'display_name')
+ self.display_name = attributes[:'display_name']
+ end
+
+ if attributes.key?(:'enabled')
+ self.enabled = attributes[:'enabled']
+ end
+
+ if attributes.key?(:'field')
+ self.field = attributes[:'field']
+ end
+
+ if attributes.key?(:'id')
+ self.id = attributes[:'id']
+ end
+
+ if attributes.key?(:'include')
+ self.include = attributes[:'include']
+ end
+
+ if attributes.key?(:'include_attr')
+ self.include_attr = attributes[:'include_attr']
+ end
+
+ if attributes.key?(:'parse_bool')
+ self.parse_bool = attributes[:'parse_bool']
+ end
+
+ if attributes.key?(:'parse_null')
+ self.parse_null = attributes[:'parse_null']
+ end
+
+ if attributes.key?(:'parse_number')
+ self.parse_number = attributes[:'parse_number']
+ end
+
+ if attributes.key?(:'text_key')
+ self.text_key = attributes[:'text_key']
+ end
+
+ if attributes.key?(:'type')
+ self.type = attributes[:'type']
+ end
+ end
+
+ # Check to see if all the properties in the model are valid
+ # @return true if the model is valid
+ # @!visibility private
+ def valid?
+ return false if @enabled.nil?
+ return false if @field.nil?
+ return false if @id.nil?
+ return false if @include.nil?
+ return false if !@text_key.nil? && @text_key.to_s.length < 1
+ return false if @type.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param enabled [Object] Object to be assigned
+ # @!visibility private
+ def enabled=(enabled)
+ if enabled.nil?
+ fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.'
+ end
+ @enabled = enabled
+ end
+
+ # Custom attribute writer method with validation
+ # @param field [Object] Object to be assigned
+ # @!visibility private
+ def field=(field)
+ if field.nil?
+ fail ArgumentError, 'invalid value for "field", field cannot be nil.'
+ end
+ @field = field
+ end
+
+ # Custom attribute writer method with validation
+ # @param id [Object] Object to be assigned
+ # @!visibility private
+ def id=(id)
+ if id.nil?
+ fail ArgumentError, 'invalid value for "id", id cannot be nil.'
+ end
+ @id = id
+ end
+
+ # Custom attribute writer method with validation
+ # @param include [Object] Object to be assigned
+ # @!visibility private
+ def include=(include)
+ if include.nil?
+ fail ArgumentError, 'invalid value for "include", include cannot be nil.'
+ end
+ @include = include
+ end
+
+ # Custom attribute writer method with validation
+ # @param text_key [Object] Object to be assigned
+ # @!visibility private
+ def text_key=(text_key)
+ if !text_key.nil? && text_key.to_s.length < 1
+ fail ArgumentError, 'invalid value for "text_key", the character length must be greater than or equal to 1.'
+ end
+ @text_key = text_key
+ end
+
+ # Custom attribute writer method with validation
+ # @param type [Object] Object to be assigned
+ # @!visibility private
+ def type=(type)
+ if type.nil?
+ fail ArgumentError, 'invalid value for "type", type cannot be nil.'
+ end
+ @type = type
+ end
+
+ # Returns the object in the form of hash, with additionalProperties support.
+ # @return [Hash] Returns the object in the form of hash
+ # @!visibility private
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ self.additional_properties.each_pair do |attr, value|
+ hash[attr] = value
+ end
+ hash
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param o [Object] Object to be compared
+ # @!visibility private
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ always_use_text_key == o.always_use_text_key &&
+ attr_prefix == o.attr_prefix &&
+ display_name == o.display_name &&
+ enabled == o.enabled &&
+ field == o.field &&
+ id == o.id &&
+ include == o.include &&
+ include_attr == o.include_attr &&
+ parse_bool == o.parse_bool &&
+ parse_null == o.parse_null &&
+ parse_number == o.parse_number &&
+ text_key == o.text_key &&
+ type == o.type &&
+ additional_properties == o.additional_properties
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ # @!visibility private
+ def hash
+ [always_use_text_key, attr_prefix, display_name, enabled, field, id, include, include_attr, parse_bool, parse_null, parse_number, text_key, type, additional_properties].hash
+ end
+ end
+end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb
new file mode 100644
index 000000000000..c75fd6adfa84
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb
@@ -0,0 +1,26 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `parse_xml`. + class ObservabilityPipelineParseXMLProcessorType + include BaseEnumModel + + PARSE_XML = "parse_xml".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb index 48b8ef8622a8..c644c39bd371 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb @@ -24,7 +24,7 @@ class ObservabilityPipelineQuotaProcessor # The display name for a component. attr_accessor :display_name - # If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + # If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. attr_accessor :drop_events # Whether this processor is enabled. @@ -45,7 +45,7 @@ class ObservabilityPipelineQuotaProcessor # Name of the quota. attr_reader :name - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. @@ -57,6 +57,12 @@ class ObservabilityPipelineQuotaProcessor # A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. attr_accessor :partition_fields + # The action to take when the quota or bucket limit is exceeded. Options: + # - `drop`: Drop the event. + # - `no_action`: Let the event pass through. + # - `overflow_routing`: Route to an overflow destination. + attr_accessor :too_many_buckets_action + # The processor type. The value should always be `quota`. 
attr_reader :type
@@ -77,6 +83,7 @@ def self.attribute_map
 :'overflow_action' => :'overflow_action',
 :'overrides' => :'overrides',
 :'partition_fields' => :'partition_fields',
+ :'too_many_buckets_action' => :'too_many_buckets_action',
 :'type' => :'type'
 }
 end
@@ -96,6 +103,7 @@ def self.openapi_types
 :'overflow_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction',
 :'overrides' => :'Array<ObservabilityPipelineQuotaProcessorOverride>',
 :'partition_fields' => :'Array<String>',
+ :'too_many_buckets_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction',
 :'type' => :'ObservabilityPipelineQuotaProcessorType'
 }
 end
@@ -166,6 +174,10 @@ def initialize(attributes = {})
 end
 end
+ if attributes.key?(:'too_many_buckets_action')
+ self.too_many_buckets_action = attributes[:'too_many_buckets_action']
+ end
+
 if attributes.key?(:'type')
 self.type = attributes[:'type']
 end
@@ -281,6 +293,7 @@ def ==(o)
 overflow_action == o.overflow_action &&
 overrides == o.overrides &&
 partition_fields == o.partition_fields &&
+ too_many_buckets_action == o.too_many_buckets_action &&
 type == o.type &&
 additional_properties == o.additional_properties
 end
@@ -289,7 +302,7 @@ def ==(o)
 # @return [Integer] Hash code
 # @!visibility private
 def hash
- [display_name, drop_events, enabled, id, ignore_when_missing_partitions, include, limit, name, overflow_action, overrides, partition_fields, type, additional_properties].hash
+ [display_name, drop_events, enabled, id, ignore_when_missing_partitions, include, limit, name, overflow_action, overrides, partition_fields, too_many_buckets_action, type, additional_properties].hash
 end
 end
end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb
index 4990af20ad61..06bc2d264f1f 100644
--- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb
@@ -17,7 +17,7 @@ require 'time'
 module DatadogAPIClient::V2
- # The action to take when the quota is exceeded. Options:
+ # The action to take when the quota or bucket limit is exceeded. Options:
 # - `drop`: Drop the event.
 # - `no_action`: Let the event pass through.
 # - `overflow_routing`: Route to an overflow destination.
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb
index e8e7e7af2b7b..9b191510677b 100644
--- a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb
@@ -27,6 +27,9 @@ class ObservabilityPipelineSampleProcessor
 # Whether this processor is enabled.
 attr_reader :enabled
+ # Optional list of fields to group events by. Each group is sampled independently.
+ attr_reader :group_by
+
 # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).
 attr_reader :id
@@ -34,10 +37,7 @@ class ObservabilityPipelineSampleProcessor
 attr_reader :include
 # The percentage of logs to sample.
- attr_accessor :percentage
-
- # Number of events to sample (1 in N).
- attr_reader :rate
+ attr_reader :percentage
 # The processor type. The value should always be `sample`.
attr_reader :type
@@ -50,10 +50,10 @@ def self.attribute_map
 {
 :'display_name' => :'display_name',
 :'enabled' => :'enabled',
+ :'group_by' => :'group_by',
 :'id' => :'id',
 :'include' => :'include',
 :'percentage' => :'percentage',
- :'rate' => :'rate',
 :'type' => :'type'
 }
 end
@@ -64,10 +64,10 @@ def self.openapi_types
 {
 :'display_name' => :'String',
 :'enabled' => :'Boolean',
+ :'group_by' => :'Array<String>',
 :'id' => :'String',
 :'include' => :'String',
 :'percentage' => :'Float',
- :'rate' => :'Integer',
 :'type' => :'ObservabilityPipelineSampleProcessorType'
 }
 end
@@ -98,6 +98,12 @@ def initialize(attributes = {})
 self.enabled = attributes[:'enabled']
 end
+ if attributes.key?(:'group_by')
+ if (value = attributes[:'group_by']).is_a?(Array)
+ self.group_by = value
+ end
+ end
+
 if attributes.key?(:'id')
 self.id = attributes[:'id']
 end
@@ -110,10 +116,6 @@
 self.percentage = attributes[:'percentage']
 end
- if attributes.key?(:'rate')
- self.rate = attributes[:'rate']
- end
-
 if attributes.key?(:'type')
 self.type = attributes[:'type']
 end
@@ -124,9 +126,10 @@
 # @!visibility private
 def valid?
 return false if @enabled.nil?
+ return false if !@group_by.nil? && @group_by.length < 1
 return false if @id.nil?
 return false if @include.nil?
- return false if !@rate.nil? && @rate < 1
+ return false if @percentage.nil?
 return false if @type.nil?
 true
 end
@@ -141,6 +144,16 @@ def enabled=(enabled)
 @enabled = enabled
 end
+ # Custom attribute writer method with validation
+ # @param group_by [Object] Object to be assigned
+ # @!visibility private
+ def group_by=(group_by)
+ if !group_by.nil? && group_by.length < 1
+ fail ArgumentError, 'invalid value for "group_by", number of items must be greater than or equal to 1.'
+ end
+ @group_by = group_by
+ end
+
 # Custom attribute writer method with validation
 # @param id [Object] Object to be assigned
 # @!visibility private
@@ -162,13 +175,13 @@ def include=(include)
 end
 # Custom attribute writer method with validation
- # @param rate [Object] Object to be assigned
+ # @param percentage [Object] Object to be assigned
 # @!visibility private
- def rate=(rate)
- if !rate.nil? && rate < 1
- fail ArgumentError, 'invalid value for "rate", must be greater than or equal to 1.'
+ def percentage=(percentage)
+ if percentage.nil?
+ fail ArgumentError, 'invalid value for "percentage", percentage cannot be nil.'
end - @rate = rate + @percentage = percentage end # Custom attribute writer method with validation @@ -209,10 +222,10 @@ def ==(o) self.class == o.class && display_name == o.display_name && enabled == o.enabled && + group_by == o.group_by && id == o.id && include == o.include && percentage == o.percentage && - rate == o.rate && type == o.type && additional_properties == o.additional_properties end @@ -221,7 +234,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, enabled, id, include, percentage, rate, type, additional_properties].hash + [display_name, enabled, group_by, id, include, percentage, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb index e7da9561356a..bd5edd5fb419 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions include BaseGenericModel + # Human-readable description providing context about a sensitive data scanner rule + attr_accessor :description + # A regular expression used to detect sensitive values. Must be a valid regex. attr_reader :rule @@ -30,6 +33,7 @@ class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions # @!visibility private def self.attribute_map { + :'description' => :'description', :'rule' => :'rule' } end @@ -38,6 +42,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'description' => :'String', :'rule' => :'String' } end @@ -60,6 +65,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'description') + self.description = attributes[:'description'] + end + if attributes.key?(:'rule') self.rule = attributes[:'rule'] end @@ -109,6 +118,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + description == o.description && rule == o.rule && additional_properties == o.additional_properties end @@ -117,7 +127,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [rule, additional_properties].hash + [description, rule, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb index d565d68bb035..b4c3b6d952b4 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions include BaseGenericModel + # Human-readable description providing context about a sensitive data scanner rule + attr_accessor :description + # Identifier for a predefined pattern from the sensitive data scanner pattern library. 
attr_reader :id @@ -33,6 +36,7 @@ class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions # @!visibility private def self.attribute_map { + :'description' => :'description', :'id' => :'id', :'use_recommended_keywords' => :'use_recommended_keywords' } @@ -42,6 +46,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'description' => :'String', :'id' => :'String', :'use_recommended_keywords' => :'Boolean' } @@ -65,6 +70,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'description') + self.description = attributes[:'description'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -118,6 +127,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + description == o.description && id == o.id && use_recommended_keywords == o.use_recommended_keywords && additional_properties == o.additional_properties @@ -127,7 +137,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, use_recommended_keywords, additional_properties].hash + [description, id, use_recommended_keywords, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb new file mode 100644 index 000000000000..43ad55c6ebc6 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb @@ -0,0 +1,227 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `split_array` processor splits array fields into separate events based on configured rules. + class ObservabilityPipelineSplitArrayProcessor + include BaseGenericModel + + # A list of array split configurations. + attr_reader :arrays + + # The display name for a component. + attr_accessor :display_name + + # Whether this processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be `*`. + attr_reader :include + + # The processor type. The value should always be `split_array`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'arrays' => :'arrays', + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private
+ def self.openapi_types
+ {
+ :'arrays' => :'Array<ObservabilityPipelineSplitArrayProcessorArrayConfig>',
+ :'display_name' => :'String',
+ :'enabled' => :'Boolean',
+ :'id' => :'String',
+ :'include' => :'String',
+ :'type' => :'ObservabilityPipelineSplitArrayProcessorType'
+ }
+ end
+
+ # Initializes the object
+ # @param attributes [Hash] Model attributes in the form of hash
+ # @!visibility private
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessor` initialize method"
+ end
+
+ self.additional_properties = {}
+ # check to see if the attribute exists and convert string to symbol for hash key
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!self.class.attribute_map.key?(k.to_sym))
+ self.additional_properties[k.to_sym] = v
+ else
+ h[k.to_sym] = v
+ end
+ }
+
+ if attributes.key?(:'arrays')
+ if (value = attributes[:'arrays']).is_a?(Array)
+ self.arrays = value
+ end
+ end
+
+ if attributes.key?(:'display_name')
+ self.display_name = attributes[:'display_name']
+ end
+
+ if attributes.key?(:'enabled')
+ self.enabled = attributes[:'enabled']
+ end
+
+ if attributes.key?(:'id')
+ self.id = attributes[:'id']
+ end
+
+ if attributes.key?(:'include')
+ self.include = attributes[:'include']
+ end
+
+ if attributes.key?(:'type')
+ self.type = attributes[:'type']
+ end
+ end
+
+ # Check to see if all the properties in the model are valid
+ # @return true if the model is valid
+ # @!visibility private
+ def valid?
+ return false if @arrays.nil?
+ return false if @arrays.length > 15
+ return false if @arrays.length < 1
+ return false if @enabled.nil?
+ return false if @id.nil?
+ return false if @include.nil?
+ return false if @type.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param arrays [Object] Object to be assigned
+ # @!visibility private
+ def arrays=(arrays)
+ if arrays.nil?
+ fail ArgumentError, 'invalid value for "arrays", arrays cannot be nil.'
+ end
+ if arrays.length > 15
+ fail ArgumentError, 'invalid value for "arrays", number of items must be less than or equal to 15.'
+ end
+ if arrays.length < 1
+ fail ArgumentError, 'invalid value for "arrays", number of items must be greater than or equal to 1.'
+ end
+ @arrays = arrays
+ end
+
+ # Custom attribute writer method with validation
+ # @param enabled [Object] Object to be assigned
+ # @!visibility private
+ def enabled=(enabled)
+ if enabled.nil?
+ fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.'
+ end
+ @enabled = enabled
+ end
+
+ # Custom attribute writer method with validation
+ # @param id [Object] Object to be assigned
+ # @!visibility private
+ def id=(id)
+ if id.nil?
+ fail ArgumentError, 'invalid value for "id", id cannot be nil.'
+ end
+ @id = id
+ end
+
+ # Custom attribute writer method with validation
+ # @param include [Object] Object to be assigned
+ # @!visibility private
+ def include=(include)
+ if include.nil?
+ fail ArgumentError, 'invalid value for "include", include cannot be nil.'
+ end
+ @include = include
+ end
+
+ # Custom attribute writer method with validation
+ # @param type [Object] Object to be assigned
+ # @!visibility private
+ def type=(type)
+ if type.nil?
+ fail ArgumentError, 'invalid value for "type", type cannot be nil.'
+ end
+ @type = type
+ end
+
+ # Returns the object in the form of hash, with additionalProperties support.
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + arrays == o.arrays && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [arrays, display_name, enabled, id, include, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb new file mode 100644 index 000000000000..50595a668515 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb @@ -0,0 +1,144 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration for a single array split operation. + class ObservabilityPipelineSplitArrayProcessorArrayConfig + include BaseGenericModel + + # The path to the array field to split. + attr_reader :field + + # A Datadog search query used to determine which logs this array split operation targets. + attr_reader :include + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'field' => :'field', + :'include' => :'include' + } + end + + # Attribute type mapping. 
+ # @!visibility private
+ def self.openapi_types
+ {
+ :'field' => :'String',
+ :'include' => :'String'
+ }
+ end
+
+ # Initializes the object
+ # @param attributes [Hash] Model attributes in the form of hash
+ # @!visibility private
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorArrayConfig` initialize method"
+ end
+
+ self.additional_properties = {}
+ # check to see if the attribute exists and convert string to symbol for hash key
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!self.class.attribute_map.key?(k.to_sym))
+ self.additional_properties[k.to_sym] = v
+ else
+ h[k.to_sym] = v
+ end
+ }
+
+ if attributes.key?(:'field')
+ self.field = attributes[:'field']
+ end
+
+ if attributes.key?(:'include')
+ self.include = attributes[:'include']
+ end
+ end
+
+ # Check to see if all the properties in the model are valid
+ # @return true if the model is valid
+ # @!visibility private
+ def valid?
+ return false if @field.nil?
+ return false if @include.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param field [Object] Object to be assigned
+ # @!visibility private
+ def field=(field)
+ if field.nil?
+ fail ArgumentError, 'invalid value for "field", field cannot be nil.'
+ end
+ @field = field
+ end
+
+ # Custom attribute writer method with validation
+ # @param include [Object] Object to be assigned
+ # @!visibility private
+ def include=(include)
+ if include.nil?
+ fail ArgumentError, 'invalid value for "include", include cannot be nil.'
+ end
+ @include = include
+ end
+
+ # Returns the object in the form of hash, with additionalProperties support.
+ # @return [Hash] Returns the object in the form of hash
+ # @!visibility private
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ self.additional_properties.each_pair do |attr, value|
+ hash[attr] = value
+ end
+ hash
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param o [Object] Object to be compared
+ # @!visibility private
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ field == o.field &&
+ include == o.include &&
+ additional_properties == o.additional_properties
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ # @!visibility private
+ def hash
+ [field, include, additional_properties].hash
+ end
+ end
+end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb
new file mode 100644
index 000000000000..fcb416f78acb
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb
@@ -0,0 +1,26 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+
+The version of the OpenAPI document: 1.0
+Contact: support@datadoghq.com
+Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator
+
+ Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `split_array`. + class ObservabilityPipelineSplitArrayProcessorType + include BaseEnumModel + + SPLIT_ARRAY = "split_array".freeze + end +end
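
The usage sketches below are editorial notes on the new models in this diff, not generated examples from the client. They assume the attribute-hash construction style these generated models accept in `initialize`; every component ID, query, and environment variable name is a placeholder.

# Hypothetical sketch: building the new OTLP source model.
# grpc_address_key and http_address_key hold the *names* of environment
# variables that contain the listen addresses, not the addresses themselves.
require "datadog_api_client"

otel_source = DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySource.new({
  id: "opentelemetry-source",  # placeholder component ID
  type: DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySourceType::OPENTELEMETRY,
  grpc_address_key: "OTLP_GRPC_ADDRESS",  # assumed env var name
  http_address_key: "OTLP_HTTP_ADDRESS",  # assumed env var name
})
otel_source.valid? # => true, since the required id and type are set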
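
A similar sketch for the `parse_xml` processor: `field` names the log attribute holding the XML string, and `text_key`, when provided, must be at least one character, which the writer above enforces.

parse_xml = DatadogAPIClient::V2::ObservabilityPipelineParseXMLProcessor.new({
  id: "parse-xml-processor",  # placeholder component ID
  type: DatadogAPIClient::V2::ObservabilityPipelineParseXMLProcessorType::PARSE_XML,
  include: "service:my-service",  # Datadog search query selecting target logs
  field: "message",               # log field containing an XML string
  enabled: true,
  include_attr: true,  # keep XML attributes in the parsed output
  attr_prefix: "@",    # assumed prefix for attribute keys
  text_key: "text",    # must be at least 1 character when set
})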
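
The sample processor hunks are a breaking change: the 1-in-N `rate` attribute is removed, `percentage` becomes required, and the optional `group_by` samples each unique combination of field values independently and must be non-empty when present. A migration sketch under those assumptions:

# Previously, rate: 10 sampled one event in ten; express that as a percentage.
sampler = DatadogAPIClient::V2::ObservabilityPipelineSampleProcessor.new({
  id: "sample-processor",  # placeholder component ID
  type: DatadogAPIClient::V2::ObservabilityPipelineSampleProcessorType::SAMPLE,
  include: "*",
  enabled: true,
  percentage: 10.0,              # required; replaces the removed rate
  group_by: ["service", "env"],  # optional; at least one field when set
})

# The quota processor's new too_many_buckets_action reuses the overflow action
# enum, so (assuming the usual x-enum-varnames constants) a partition-bucket
# overrun can be handled like a quota overrun:
# quota.too_many_buckets_action =
#   DatadogAPIClient::V2::ObservabilityPipelineQuotaProcessorOverflowAction::DROP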
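
Finally, a sketch for the `split_array` processor: each array config names one array field to fan out into separate events, and the `arrays=` writer enforces between 1 and 15 entries.

split_array = DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessor.new({
  id: "split-array-processor",  # placeholder component ID
  type: DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorType::SPLIT_ARRAY,
  include: "*",  # split_array typically targets all logs
  enabled: true,
  arrays: [      # 1 to 15 entries, enforced by the writer
    DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorArrayConfig.new({
      field: "records",  # assumed path to the array field to split
      include: "*",
    }),
  ],
})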