From e2d6947659ea32d3dcfcd9939ec2fa4c23615c5a Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Mon, 21 Apr 2025 16:38:33 +0000 Subject: [PATCH] Regenerate client from commit 6f649d92 of spec repo --- .apigentools-info | 8 +- .generator/schemas/v2/openapi.yaml | 274 +++++++++++++++++- lib/datadog_api_client/inflector.rb | 13 + lib/datadog_api_client/v2/model_base.rb | 6 +- .../models/observability_pipeline_config.rb | 13 +- ...vability_pipeline_config_processor_item.rb | 4 +- ...servability_pipeline_config_source_item.rb | 4 +- ...rvability_pipeline_datadog_agent_source.rb | 2 +- .../models/observability_pipeline_decoding.rb | 29 ++ .../observability_pipeline_fluent_source.rb | 154 ++++++++++ ...servability_pipeline_fluent_source_type.rb | 26 ++ ...servability_pipeline_http_server_source.rb | 196 +++++++++++++ ...peline_http_server_source_auth_strategy.rb | 27 ++ ...bility_pipeline_http_server_source_type.rb | 26 ++ .../observability_pipeline_kafka_source.rb | 2 +- ...rvability_pipeline_parse_grok_processor.rb | 221 ++++++++++++++ ...lity_pipeline_parse_grok_processor_rule.rb | 173 +++++++++++ ...ne_parse_grok_processor_rule_match_rule.rb | 145 +++++++++ ..._parse_grok_processor_rule_support_rule.rb | 144 +++++++++ ...lity_pipeline_parse_grok_processor_type.rb | 26 ++ .../observability_pipeline_quota_processor.rb | 2 +- ...observability_pipeline_sample_processor.rb | 219 ++++++++++++++ ...vability_pipeline_sample_processor_type.rb | 26 ++ .../v2/models/observability_pipeline_tls.rb | 2 +- 24 files changed, 1712 insertions(+), 30 deletions(-) create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_decoding.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_fluent_source.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_fluent_source_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_auth_strategy.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_match_rule.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_support_rule.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_type.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb create mode 100644 lib/datadog_api_client/v2/models/observability_pipeline_sample_processor_type.rb diff --git a/.apigentools-info b/.apigentools-info index 097b85f62f46..30a74776c299 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-17 13:26:11.488046", - "spec_repo_commit": "12ab5180" + "regenerated": "2025-04-21 16:38:01.704170", + "spec_repo_commit": "6f649d92" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-17 13:26:11.504561", - "spec_repo_commit": "12ab5180" + "regenerated": "2025-04-21 16:38:01.798291", + "spec_repo_commit": "6f649d92" } } } \ No newline at end of file diff --git 
a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index a191ada020a0..6414c601526c 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -22576,7 +22576,6 @@ components: type: array required: - sources - - processors - destinations type: object ObservabilityPipelineConfigDestinationItem: @@ -22592,11 +22591,15 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' ObservabilityPipelineCreateRequest: description: Top-level schema representing a pipeline. properties: @@ -22711,6 +22714,20 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDecoding: + description: The decoding format used to interpret incoming logs. + enum: + - bytes + - gelf + - json + - syslog + example: json + type: string + x-enum-varnames: + - DECODE_BYTES + - DECODE_GELF + - DECODE_JSON + - DECODE_SYSLOG ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: @@ -22768,6 +22785,73 @@ components: type: string x-enum-varnames: - FILTER + ObservabilityPipelineFluentSource: + description: The `fluent` source ingests logs from a Fluentd-compatible service. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: fluent-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineFluentSourceType' + required: + - id + - type + type: object + ObservabilityPipelineFluentSourceType: + default: fluent + description: The source type. The value should always be `fluent`. + enum: + - fluent + example: fluent + type: string + x-enum-varnames: + - FLUENT + ObservabilityPipelineHttpServerSource: + description: The `http_server` source collects logs over HTTP POST from external + services. + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: Unique ID for the HTTP server source. + example: http-server-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceType' + required: + - id + - type + - auth_strategy + - decoding + type: object + ObservabilityPipelineHttpServerSourceAuthStrategy: + description: HTTP authentication method. + enum: + - none + - plain + example: plain + type: string + x-enum-varnames: + - NONE + - PLAIN + ObservabilityPipelineHttpServerSourceType: + default: http_server + description: The source type. The value should always be `http_server`. 
+ enum: + - http_server + example: http_server + type: string + x-enum-varnames: + - HTTP_SERVER ObservabilityPipelineKafkaSource: description: The `kafka` source ingests data from Apache Kafka topics. properties: @@ -22841,6 +22925,136 @@ components: type: string x-enum-varnames: - KAFKA + ObservabilityPipelineParseGrokProcessor: + description: The `parse_grok` processor extracts structured fields from unstructured + log messages using Grok patterns. + properties: + disable_library_rules: + default: false + description: If set to `true`, disables the default Grok rules provided + by Datadog. + example: true + type: boolean + id: + description: A unique identifier for this processor. + example: parse-grok-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + rules: + description: The list of Grok parsing rules. If multiple matching rules + are provided, they are evaluated in order. The first successful match + is applied. + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRule' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorType' + required: + - id + - type + - include + - inputs + - rules + type: object + ObservabilityPipelineParseGrokProcessorRule: + description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule + defines how to extract structured fields + + from a specific log field using Grok patterns. + + ' + properties: + match_rules: + description: 'A list of Grok parsing rules that define how to extract fields + from the source field. + + Each rule must contain a name and a valid Grok pattern. + + ' + example: + - name: MyParsingRule + rule: '%{word:user} connected on %{date("MM/dd/yyyy"):date}' + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRuleMatchRule' + type: array + source: + description: The name of the field in the log event to apply the Grok rules + to. + example: message + type: string + support_rules: + description: 'A list of Grok helper rules that can be referenced by the + parsing rules. + + ' + example: + - name: user + rule: '%{word:user.name}' + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRuleSupportRule' + type: array + required: + - source + - match_rules + - support_rules + type: object + ObservabilityPipelineParseGrokProcessorRuleMatchRule: + description: 'Defines a Grok parsing rule, which extracts structured fields + from log content using named Grok patterns. + + Each rule must have a unique name and a valid Datadog Grok pattern that will + be applied to the source field. + + ' + properties: + name: + description: The name of the rule. + example: MyParsingRule + type: string + rule: + description: The definition of the Grok rule. + example: '%{word:user} connected on %{date("MM/dd/yyyy"):date}' + type: string + required: + - name + - rule + type: object + ObservabilityPipelineParseGrokProcessorRuleSupportRule: + description: The Grok helper rule referenced in the parsing rules. + properties: + name: + description: The name of the Grok helper rule. + example: user + type: string + rule: + description: The definition of the Grok helper rule. 
+ example: ' %{word:user.name}' + type: string + required: + - name + - rule + type: object + ObservabilityPipelineParseGrokProcessorType: + default: parse_grok + description: The processor type. The value should always be `parse_grok`. + enum: + - parse_grok + example: parse_grok + type: string + x-enum-varnames: + - PARSE_GROK ObservabilityPipelineParseJSONProcessor: description: The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded @@ -22935,8 +23149,8 @@ components: limit: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' name: - description: Name for identifying the processor. - example: MyPipelineQuotaProcessor + description: Name of the quota. + example: MyQuota type: string overrides: description: A list of alternate quota rules that apply to specific sets @@ -23130,8 +23344,60 @@ components: type: string x-enum-varnames: - RENAME_FIELDS + ObservabilityPipelineSampleProcessor: + description: The `sample` processor allows probabilistic sampling of logs at + a fixed rate. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: sample-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + percentage: + description: The percentage of logs to sample. + example: 10.0 + format: double + type: number + rate: + description: Number of events to sample (1 in N). + example: 10 + format: int64 + minimum: 1 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' + required: + - id + - type + - include + - inputs + type: object + ObservabilityPipelineSampleProcessorType: + default: sample + description: The processor type. The value should always be `sample`. + enum: + - sample + example: sample + type: string + x-enum-varnames: + - SAMPLE ObservabilityPipelineTls: - description: Configuration for enabling TLS encryption. + description: Configuration for enabling TLS encryption between the pipeline + component and external services. 
properties: ca_file: description: "Path to the Certificate Authority (CA) file used to validate diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index be45ee7c8893..ffd5cf40d5e8 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -2318,13 +2318,24 @@ def overrides "v2.observability_pipeline_datadog_agent_source_type" => "ObservabilityPipelineDatadogAgentSourceType", "v2.observability_pipeline_datadog_logs_destination" => "ObservabilityPipelineDatadogLogsDestination", "v2.observability_pipeline_datadog_logs_destination_type" => "ObservabilityPipelineDatadogLogsDestinationType", + "v2.observability_pipeline_decoding" => "ObservabilityPipelineDecoding", "v2.observability_pipeline_field_value" => "ObservabilityPipelineFieldValue", "v2.observability_pipeline_filter_processor" => "ObservabilityPipelineFilterProcessor", "v2.observability_pipeline_filter_processor_type" => "ObservabilityPipelineFilterProcessorType", + "v2.observability_pipeline_fluent_source" => "ObservabilityPipelineFluentSource", + "v2.observability_pipeline_fluent_source_type" => "ObservabilityPipelineFluentSourceType", + "v2.observability_pipeline_http_server_source" => "ObservabilityPipelineHttpServerSource", + "v2.observability_pipeline_http_server_source_auth_strategy" => "ObservabilityPipelineHttpServerSourceAuthStrategy", + "v2.observability_pipeline_http_server_source_type" => "ObservabilityPipelineHttpServerSourceType", "v2.observability_pipeline_kafka_source" => "ObservabilityPipelineKafkaSource", "v2.observability_pipeline_kafka_source_librdkafka_option" => "ObservabilityPipelineKafkaSourceLibrdkafkaOption", "v2.observability_pipeline_kafka_source_sasl" => "ObservabilityPipelineKafkaSourceSasl", "v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType", + "v2.observability_pipeline_parse_grok_processor" => "ObservabilityPipelineParseGrokProcessor", + "v2.observability_pipeline_parse_grok_processor_rule" => "ObservabilityPipelineParseGrokProcessorRule", + "v2.observability_pipeline_parse_grok_processor_rule_match_rule" => "ObservabilityPipelineParseGrokProcessorRuleMatchRule", + "v2.observability_pipeline_parse_grok_processor_rule_support_rule" => "ObservabilityPipelineParseGrokProcessorRuleSupportRule", + "v2.observability_pipeline_parse_grok_processor_type" => "ObservabilityPipelineParseGrokProcessorType", "v2.observability_pipeline_parse_json_processor" => "ObservabilityPipelineParseJSONProcessor", "v2.observability_pipeline_parse_json_processor_type" => "ObservabilityPipelineParseJSONProcessorType", "v2.observability_pipeline_pipeline_kafka_source_sasl_mechanism" => "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", @@ -2338,6 +2349,8 @@ def overrides "v2.observability_pipeline_rename_fields_processor" => "ObservabilityPipelineRenameFieldsProcessor", "v2.observability_pipeline_rename_fields_processor_field" => "ObservabilityPipelineRenameFieldsProcessorField", "v2.observability_pipeline_rename_fields_processor_type" => "ObservabilityPipelineRenameFieldsProcessorType", + "v2.observability_pipeline_sample_processor" => "ObservabilityPipelineSampleProcessor", + "v2.observability_pipeline_sample_processor_type" => "ObservabilityPipelineSampleProcessorType", "v2.observability_pipeline_tls" => "ObservabilityPipelineTls", "v2.okta_account" => "OktaAccount", "v2.okta_account_attributes" => "OktaAccountAttributes", diff --git a/lib/datadog_api_client/v2/model_base.rb b/lib/datadog_api_client/v2/model_base.rb 
index cfc8c7051560..7bdfde0c6bcd 100644 --- a/lib/datadog_api_client/v2/model_base.rb +++ b/lib/datadog_api_client/v2/model_base.rb @@ -124,7 +124,7 @@ def _deserialize(type, value) # generic array, return directly value when :UUID - value + value.to_s when /\AArray<(?.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } @@ -255,9 +255,7 @@ def find_and_cast_into_type(klass, data) when 'Object' # "type: object" return data if data.instance_of?(Hash) when 'UUID' - raise TypeError, "Expected String, got #{uuid_string.class.name} instead." unless uuid_string.kind_of?(String) - raise ArgumentError, "Invalid UUID format." unless /\A\h{8}-\h{4}-\h{4}-\h{4}-\h{12}\z/.match?(data) - return data + return UUIDTools::UUID.parse(data) when /\AArray<(?.+)>\z/ # "type: array" if data.instance_of?(Array) sub_type = Regexp.last_match[:sub_type] diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb index 694fc22da053..5f34d5e2da95 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb @@ -25,7 +25,7 @@ class ObservabilityPipelineConfig attr_reader :destinations # A list of processors that transform or enrich log data. - attr_reader :processors + attr_accessor :processors # A list of configured data sources for the pipeline. attr_reader :sources @@ -94,7 +94,6 @@ def initialize(attributes = {}) # @!visibility private def valid? return false if @destinations.nil? - return false if @processors.nil? return false if @sources.nil? true end @@ -109,16 +108,6 @@ def destinations=(destinations) @destinations = destinations end - # Custom attribute writer method with validation - # @param processors [Object] Object to be assigned - # @!visibility private - def processors=(processors) - if processors.nil? - fail ArgumentError, 'invalid value for "processors", processors cannot be nil.' 
- end - @processors = processors - end - # Custom attribute writer method with validation # @param sources [Object] Object to be assigned # @!visibility private diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb index 65f95cd2fee5..ee2ce0d03598 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb @@ -31,7 +31,9 @@ def openapi_one_of :'ObservabilityPipelineQuotaProcessor', :'ObservabilityPipelineAddFieldsProcessor', :'ObservabilityPipelineRemoveFieldsProcessor', - :'ObservabilityPipelineRenameFieldsProcessor' + :'ObservabilityPipelineRenameFieldsProcessor', + :'ObservabilityPipelineSampleProcessor', + :'ObservabilityPipelineParseGrokProcessor' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb index 7e1a5bd8a716..3cfdaa42f3fd 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb @@ -27,7 +27,9 @@ class << self def openapi_one_of [ :'ObservabilityPipelineKafkaSource', - :'ObservabilityPipelineDatadogAgentSource' + :'ObservabilityPipelineDatadogAgentSource', + :'ObservabilityPipelineFluentSource', + :'ObservabilityPipelineHttpServerSource' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb index 8ed4d8b3ade8..27b724373297 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb @@ -24,7 +24,7 @@ class ObservabilityPipelineDatadogAgentSource # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). attr_reader :id - # Configuration for enabling TLS encryption. + # Configuration for enabling TLS encryption between the pipeline component and external services. attr_accessor :tls # The source type. The value should always be `datadog_agent`. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_decoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_decoding.rb new file mode 100644 index 000000000000..e4a37fa66de2 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_decoding.rb @@ -0,0 +1,29 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The decoding format used to interpret incoming logs. 
+ class ObservabilityPipelineDecoding + include BaseEnumModel + + DECODE_BYTES = "bytes".freeze + DECODE_GELF = "gelf".freeze + DECODE_JSON = "json".freeze + DECODE_SYSLOG = "syslog".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_source.rb new file mode 100644 index 000000000000..30f23ad7bb8a --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_source.rb @@ -0,0 +1,154 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `fluent` source ingests logs from a Fluentd-compatible service. + class ObservabilityPipelineFluentSource + include BaseGenericModel + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The source type. The value should always be `fluent`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineFluentSourceType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineFluentSource` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? 
+ fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [id, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_source_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_source_type.rb new file mode 100644 index 000000000000..d01c4f72fb48 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_source_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The source type. The value should always be `fluent`. + class ObservabilityPipelineFluentSourceType + include BaseEnumModel + + FLUENT = "fluent".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb new file mode 100644 index 000000000000..80c08b3a58d7 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb @@ -0,0 +1,196 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `http_server` source collects logs over HTTP POST from external services. + class ObservabilityPipelineHttpServerSource + include BaseGenericModel + + # HTTP authentication method. + attr_reader :auth_strategy + + # The decoding format used to interpret incoming logs. + attr_reader :decoding + + # Unique ID for the HTTP server source. 
+ attr_reader :id + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The source type. The value should always be `http_server`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'auth_strategy' => :'auth_strategy', + :'decoding' => :'decoding', + :'id' => :'id', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'auth_strategy' => :'ObservabilityPipelineHttpServerSourceAuthStrategy', + :'decoding' => :'ObservabilityPipelineDecoding', + :'id' => :'String', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineHttpServerSourceType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpServerSource` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'auth_strategy') + self.auth_strategy = attributes[:'auth_strategy'] + end + + if attributes.key?(:'decoding') + self.decoding = attributes[:'decoding'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @auth_strategy.nil? + return false if @decoding.nil? + return false if @id.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param auth_strategy [Object] Object to be assigned + # @!visibility private + def auth_strategy=(auth_strategy) + if auth_strategy.nil? + fail ArgumentError, 'invalid value for "auth_strategy", auth_strategy cannot be nil.' + end + @auth_strategy = auth_strategy + end + + # Custom attribute writer method with validation + # @param decoding [Object] Object to be assigned + # @!visibility private + def decoding=(decoding) + if decoding.nil? + fail ArgumentError, 'invalid value for "decoding", decoding cannot be nil.' + end + @decoding = decoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + auth_strategy == o.auth_strategy && + decoding == o.decoding && + id == o.id && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [auth_strategy, decoding, id, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_auth_strategy.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_auth_strategy.rb new file mode 100644 index 000000000000..76974a4d2d3c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_auth_strategy.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # HTTP authentication method. + class ObservabilityPipelineHttpServerSourceAuthStrategy + include BaseEnumModel + + NONE = "none".freeze + PLAIN = "plain".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_type.rb new file mode 100644 index 000000000000..e93c133350f7 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The source type. The value should always be `http_server`. 
+ class ObservabilityPipelineHttpServerSourceType + include BaseEnumModel + + HTTP_SERVER = "http_server".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb index 31fd26571bd6..33c4b1649b15 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb @@ -33,7 +33,7 @@ class ObservabilityPipelineKafkaSource # Specifies the SASL mechanism for authenticating with a Kafka cluster. attr_accessor :sasl - # Configuration for enabling TLS encryption. + # Configuration for enabling TLS encryption between the pipeline component and external services. attr_accessor :tls # A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb new file mode 100644 index 000000000000..2b39ee4555d6 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb @@ -0,0 +1,221 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + class ObservabilityPipelineParseGrokProcessor + include BaseGenericModel + + # If set to `true`, disables the default Grok rules provided by Datadog. + attr_accessor :disable_library_rules + + # A unique identifier for this processor. + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + attr_reader :rules + + # The processor type. The value should always be `parse_grok`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'disable_library_rules' => :'disable_library_rules', + :'id' => :'id', + :'include' => :'include', + :'inputs' => :'inputs', + :'rules' => :'rules', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'disable_library_rules' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'inputs' => :'Array', + :'rules' => :'Array', + :'type' => :'ObservabilityPipelineParseGrokProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'disable_library_rules') + self.disable_library_rules = attributes[:'disable_library_rules'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'rules') + if (value = attributes[:'rules']).is_a?(Array) + self.rules = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @include.nil? + return false if @inputs.nil? + return false if @rules.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param rules [Object] Object to be assigned + # @!visibility private + def rules=(rules) + if rules.nil? + fail ArgumentError, 'invalid value for "rules", rules cannot be nil.' + end + @rules = rules + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + disable_library_rules == o.disable_library_rules && + id == o.id && + include == o.include && + inputs == o.inputs && + rules == o.rules && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [disable_library_rules, id, include, inputs, rules, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule.rb new file mode 100644 index 000000000000..fd3aac7b104b --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule.rb @@ -0,0 +1,173 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields + # from a specific log field using Grok patterns. + class ObservabilityPipelineParseGrokProcessorRule + include BaseGenericModel + + # A list of Grok parsing rules that define how to extract fields from the source field. + # Each rule must contain a name and a valid Grok pattern. + # + attr_reader :match_rules + + # The name of the field in the log event to apply the Grok rules to. + attr_reader :source + + # A list of Grok helper rules that can be referenced by the parsing rules. + # + attr_reader :support_rules + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'match_rules' => :'match_rules', + :'source' => :'source', + :'support_rules' => :'support_rules' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'match_rules' => :'Array', + :'source' => :'String', + :'support_rules' => :'Array' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'match_rules') + if (value = attributes[:'match_rules']).is_a?(Array) + self.match_rules = value + end + end + + if attributes.key?(:'source') + self.source = attributes[:'source'] + end + + if attributes.key?(:'support_rules') + if (value = attributes[:'support_rules']).is_a?(Array) + self.support_rules = value + end + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @match_rules.nil? + return false if @source.nil? + return false if @support_rules.nil? + true + end + + # Custom attribute writer method with validation + # @param match_rules [Object] Object to be assigned + # @!visibility private + def match_rules=(match_rules) + if match_rules.nil? + fail ArgumentError, 'invalid value for "match_rules", match_rules cannot be nil.' + end + @match_rules = match_rules + end + + # Custom attribute writer method with validation + # @param source [Object] Object to be assigned + # @!visibility private + def source=(source) + if source.nil? + fail ArgumentError, 'invalid value for "source", source cannot be nil.' + end + @source = source + end + + # Custom attribute writer method with validation + # @param support_rules [Object] Object to be assigned + # @!visibility private + def support_rules=(support_rules) + if support_rules.nil? + fail ArgumentError, 'invalid value for "support_rules", support_rules cannot be nil.' + end + @support_rules = support_rules + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + match_rules == o.match_rules && + source == o.source && + support_rules == o.support_rules && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [match_rules, source, support_rules, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_match_rule.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_match_rule.rb new file mode 100644 index 000000000000..58ea214de40f --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_match_rule.rb @@ -0,0 +1,145 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Defines a Grok parsing rule, which extracts structured fields from log content using named Grok patterns. + # Each rule must have a unique name and a valid Datadog Grok pattern that will be applied to the source field. + class ObservabilityPipelineParseGrokProcessorRuleMatchRule + include BaseGenericModel + + # The name of the rule. + attr_reader :name + + # The definition of the Grok rule. + attr_reader :rule + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'name' => :'name', + :'rule' => :'rule' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'name' => :'String', + :'rule' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorRuleMatchRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'name') + self.name = attributes[:'name'] + end + + if attributes.key?(:'rule') + self.rule = attributes[:'rule'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @name.nil? + return false if @rule.nil? + true + end + + # Custom attribute writer method with validation + # @param name [Object] Object to be assigned + # @!visibility private + def name=(name) + if name.nil? + fail ArgumentError, 'invalid value for "name", name cannot be nil.' + end + @name = name + end + + # Custom attribute writer method with validation + # @param rule [Object] Object to be assigned + # @!visibility private + def rule=(rule) + if rule.nil? + fail ArgumentError, 'invalid value for "rule", rule cannot be nil.' + end + @rule = rule + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + name == o.name && + rule == o.rule && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [name, rule, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_support_rule.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_support_rule.rb new file mode 100644 index 000000000000..58bf731f6a9c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_rule_support_rule.rb @@ -0,0 +1,144 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The Grok helper rule referenced in the parsing rules. + class ObservabilityPipelineParseGrokProcessorRuleSupportRule + include BaseGenericModel + + # The name of the Grok helper rule. + attr_reader :name + + # The definition of the Grok helper rule. + attr_reader :rule + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'name' => :'name', + :'rule' => :'rule' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'name' => :'String', + :'rule' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorRuleSupportRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'name') + self.name = attributes[:'name'] + end + + if attributes.key?(:'rule') + self.rule = attributes[:'rule'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @name.nil? 
+ return false if @rule.nil? + true + end + + # Custom attribute writer method with validation + # @param name [Object] Object to be assigned + # @!visibility private + def name=(name) + if name.nil? + fail ArgumentError, 'invalid value for "name", name cannot be nil.' + end + @name = name + end + + # Custom attribute writer method with validation + # @param rule [Object] Object to be assigned + # @!visibility private + def rule=(rule) + if rule.nil? + fail ArgumentError, 'invalid value for "rule", rule cannot be nil.' + end + @rule = rule + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + name == o.name && + rule == o.rule && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [name, rule, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_type.rb new file mode 100644 index 000000000000..fe43518b001d --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `parse_grok`. + class ObservabilityPipelineParseGrokProcessorType + include BaseEnumModel + + PARSE_GROK = "parse_grok".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb index 96b829cbd6b4..3db9b5162eb4 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb @@ -39,7 +39,7 @@ class ObservabilityPipelineQuotaProcessor # The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. attr_reader :limit - # Name for identifying the processor. + # Name of the quota. attr_reader :name # A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit. 
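For reviewers, a minimal sketch of how the new models introduced by this patch might be wired together in Ruby. The class names, attribute names, and enum constants are taken from the generated schemas and files above; the `require` entry point and the final assembly into `ObservabilityPipelineConfig` follow existing client conventions and are assumptions, not something this patch itself demonstrates.

    # Illustrative only: constructs the new source and processors added in this patch.
    require "datadog_api_client"

    # `http_server` source: auth_strategy and decoding are required alongside id/type.
    http_source = DatadogAPIClient::V2::ObservabilityPipelineHttpServerSource.new({
      id: "http-server-source",
      auth_strategy: DatadogAPIClient::V2::ObservabilityPipelineHttpServerSourceAuthStrategy::PLAIN,
      decoding: DatadogAPIClient::V2::ObservabilityPipelineDecoding::DECODE_JSON,
      type: DatadogAPIClient::V2::ObservabilityPipelineHttpServerSourceType::HTTP_SERVER,
    })

    # `parse_grok` processor: rules are evaluated in order and the first match wins.
    grok = DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessor.new({
      id: "parse-grok-processor",
      include: "service:my-service",
      inputs: ["http-server-source"],
      rules: [
        DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorRule.new({
          source: "message",
          match_rules: [
            DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorRuleMatchRule.new({
              name: "MyParsingRule",
              rule: '%{word:user} connected on %{date("MM/dd/yyyy"):date}',
            }),
          ],
          support_rules: [
            DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorRuleSupportRule.new({
              name: "user",
              rule: "%{word:user.name}",
            }),
          ],
        }),
      ],
      type: DatadogAPIClient::V2::ObservabilityPipelineParseGrokProcessorType::PARSE_GROK,
    })

    # `sample` processor: keeps 1 in N events; rate must be >= 1.
    sampler = DatadogAPIClient::V2::ObservabilityPipelineSampleProcessor.new({
      id: "sample-processor",
      include: "service:my-service",
      inputs: ["parse-grok-processor"],
      rate: 10,
      type: DatadogAPIClient::V2::ObservabilityPipelineSampleProcessorType::SAMPLE,
    })

    # These objects slot into the existing ObservabilityPipelineConfig arrays:
    # `http_source` under `sources`, and `grok`/`sampler` under `processors`
    # (which this patch makes optional), alongside the required `destinations`.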
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb new file mode 100644 index 000000000000..32229c4bad55 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb @@ -0,0 +1,219 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `sample` processor allows probabilistic sampling of logs at a fixed rate. + class ObservabilityPipelineSampleProcessor + include BaseGenericModel + + # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The percentage of logs to sample. + attr_accessor :percentage + + # Number of events to sample (1 in N). + attr_reader :rate + + # The processor type. The value should always be `sample`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'include' => :'include', + :'inputs' => :'inputs', + :'percentage' => :'percentage', + :'rate' => :'rate', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'include' => :'String', + :'inputs' => :'Array', + :'percentage' => :'Float', + :'rate' => :'Integer', + :'type' => :'ObservabilityPipelineSampleProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSampleProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'percentage') + self.percentage = attributes[:'percentage'] + end + + if attributes.key?(:'rate') + self.rate = attributes[:'rate'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @include.nil? + return false if @inputs.nil? + return false if !@rate.nil? && @rate < 1 + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param rate [Object] Object to be assigned + # @!visibility private + def rate=(rate) + if !rate.nil? && rate < 1 + fail ArgumentError, 'invalid value for "rate", must be greater than or equal to 1.' + end + @rate = rate + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
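+          # Emit a nil value only when the attribute is declared nullable and was explicitly set; otherwise skip it.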
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + include == o.include && + inputs == o.inputs && + percentage == o.percentage && + rate == o.rate && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [id, include, inputs, percentage, rate, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor_type.rb new file mode 100644 index 000000000000..fdef23fcdb52 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `sample`. + class ObservabilityPipelineSampleProcessorType + include BaseEnumModel + + SAMPLE = "sample".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_tls.rb b/lib/datadog_api_client/v2/models/observability_pipeline_tls.rb index 665689da6821..7d6fdfd22a33 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_tls.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_tls.rb @@ -17,7 +17,7 @@ require 'time' module DatadogAPIClient::V2 - # Configuration for enabling TLS encryption. + # Configuration for enabling TLS encryption between the pipeline component and external services. class ObservabilityPipelineTls include BaseGenericModel
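Editor's sketch (not part of the generated patch) of the new sample processor model defined above. As its field docs state, `percentage` gives the share of logs to sample while `rate` samples 1 in N events, and the generated validation only requires `rate`, when set, to be at least 1. The component IDs and search query below are placeholders.

require "datadog_api_client"

# Assumes a pipeline where "datadog-agent-source" is an upstream component ID.
sample = DatadogAPIClient::V2::ObservabilityPipelineSampleProcessor.new({
  id: "sample-processor",           # placeholder component ID
  include: "service:my-service",    # placeholder Datadog search query
  inputs: ["datadog-agent-source"], # placeholder upstream component IDs
  rate: 10,                         # keep roughly 1 in 10 matching events
  type: DatadogAPIClient::V2::ObservabilityPipelineSampleProcessorType::SAMPLE,
})

sample.valid? # => true
sample.rate = 0
# => ArgumentError: invalid value for "rate", must be greater than or equal to 1.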