
Commit 3431088

therve and ci.datadog-api-spec authored
Handle deprecation of APIs and attributes (#1170)
* Add deprecation warnings

  Add some mechanism for deprecated APIs and attributes.

* Deprecated attrs
* Add a note in APIs too
* Minor doc formatting.
* pre-commit fixes

Co-authored-by: ci.datadog-api-spec <[email protected]>
1 parent 903b123 commit 3431088


42 files changed: 49 additions, 83 deletions

.generator/src/generator/formatter.py

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ def header(self, text, level, raw=None):


def docstring(text):
-return m2r2.convert(text.replace("\\n", "\\\\n"), renderer=CustomRenderer())[1:-1].replace("\\ ", " ").replace("\\`", "\\\\`")
+return m2r2.convert(text.replace("\\n", "\\\\n"), renderer=CustomRenderer())[1:-1].replace("\\ ", " ").replace("\\`", "\\\\`").replace("\n\n\n", "\n\n")


def _merge_imports(a, b):
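The added .replace("\n\n\n", "\n\n") collapses the double blank lines that the docstring conversion leaves before bullet lists, which is what removes the extra blank line in each of the regenerated files further down. A minimal sketch of the effect on a plain string (the m2r2 conversion step itself is omitted, and the example text is shortened from the metrics docstring below):

# Illustrative only: effect of the new .replace("\n\n\n", "\n\n") on converted docstring text.
converted = "The metrics endpoint allows you to:\n\n\n* Post metrics data\n* Query metrics\n"
cleaned = converted.replace("\n\n\n", "\n\n")
assert cleaned == "The metrics endpoint allows you to:\n\n* Post metrics data\n* Query metrics\n"
print(cleaned)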

.generator/src/generator/templates/api.j2

Lines changed: 5 additions & 1 deletion
@@ -3,6 +3,7 @@ from __future__ import annotations

import collections
from typing import Any, Dict, List, Union
+import warnings

from {{ package }}.api_client import ApiClient, Endpoint as _Endpoint
from {{ package }}.model_utils import (

@@ -141,7 +142,7 @@ class {{ classname }}:
{%- for path, method, operation in operations|sort(attribute="2.operationId") %}
{%- set returnType = operation|return_type %}
def {{ operation.operationId|safe_snake_case }}(self, {% for name, parameter in operation|parameters if parameter.required %}{{name|attribute_name}}: {{ get_type_for_parameter(parameter, typing=True) }}, {% endfor %}{% for name, parameter in operation|parameters if not parameter.required %}{% if loop.first %}*, {% endif %}{{name|attribute_name}}: Union[{{ get_type_for_parameter(parameter, typing=True) }}, UnsetType]=unset, {% endfor %}) -> {% if returnType %}{{ returnType.replace("[", "List[") }}{% else %}None{% endif %}:
-"""{{ operation.summary|indent(8) }}.
+"""{{ operation.summary|indent(8) }}.{% if operation.deprecated %} **Deprecated**.{% endif %}
{% if operation.description %}
{{ operation.description|docstring|indent(8) }}
{% endif %}

@@ -168,6 +169,9 @@ class {{ classname }}:
kwargs["{{ name|attribute_name }}"] = {{ name|attribute_name }}
{%- endif %}
{% endfor %}
+{%- if operation.deprecated %}
+warnings.warn("{{ operation.operationId|safe_snake_case }} is deprecated", DeprecationWarning, stacklevel=2)
+{%- endif %}
return self._{{ operation.operationId|safe_snake_case }}_endpoint.call_with_http_info(**kwargs)
{%- if operation["x-pagination"] %}
{%- set pagination = operation["x-pagination"] %}
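With this template change, every generated operation flagged as deprecated in the spec now emits a DeprecationWarning at call time (see the regenerated submit_log below). A minimal sketch of how a caller could escalate those warnings into hard failures, for example in CI; the stand-in function here only mimics what a generated deprecated method does and is not part of this commit:

import warnings

def some_deprecated_operation():
    # Stand-in for a generated method; the generated code warns like this
    # before performing the HTTP call.
    warnings.warn("some_deprecated_operation is deprecated", DeprecationWarning, stacklevel=2)
    return {"status": "ok"}

# Escalate deprecation warnings to errors so deprecated client calls are caught early.
warnings.simplefilter("error", DeprecationWarning)

try:
    some_deprecated_operation()
except DeprecationWarning as exc:
    print(f"deprecated client method called: {exc}")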

.generator/src/generator/templates/model_generic.j2

Lines changed: 1 addition & 1 deletion
@@ -107,7 +107,7 @@ class {{ name }}(ModelNormal):
{{ model.description|docstring|indent(8) }}
{%- for attr, definition in model.get("properties", {}).items() %}
{# keep new line #}
-:param {{ attr|attribute_name }}: {{ definition.description|docstring|indent(12) }}
+:param {{ attr|attribute_name }}: {{ definition.description|docstring|indent(12) }}{% if definition.deprecated %} **Deprecated**.{% endif %}
:type {{ attr|attribute_name }}: {{ get_type_for_attribute(model, attr, current_name=name) }}{% if definition.nullable %}, none_type{% endif %}{% if attr not in model.get("required", []) %}, optional{% endif %}
{%- endfor %}
"""

src/datadog_api_client/v1/api/aws_logs_integration_api.py

Lines changed: 0 additions & 2 deletions
@@ -175,7 +175,6 @@ def check_aws_logs_lambda_async(
is the same as for Enable an AWS service log collection. Subsequent requests will always repeat the above, so this
endpoint can be polled intermittently instead of blocking.

-
* Returns a status of 'created' when it's checking if the Lambda exists in the account.
* Returns a status of 'waiting' while checking.
* Returns a status of 'checked and ok' if the Lambda exists.

@@ -201,7 +200,6 @@ def check_aws_logs_services_async(
Done async, so can be repeatedly polled in a non-blocking fashion until
the async request completes.

-
* Returns a status of ``created`` when it's checking if the permissions exists
in the AWS account.
* Returns a status of ``waiting`` while checking.
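The docstrings above describe an asynchronous check that is meant to be polled. A hedged polling sketch built only on the statuses listed there; the client setup, the request body, and the exact shape of the response (a status field) are assumptions for illustration, not taken from this commit:

import time

def wait_for_lambda_check(api, body, delay=2.0, attempts=30):
    # Poll check_aws_logs_lambda_async until it leaves the 'created'/'waiting'
    # states described above (the response shape is assumed).
    for _ in range(attempts):
        result = api.check_aws_logs_lambda_async(body=body)
        status = result.get("status") if isinstance(result, dict) else getattr(result, "status", None)
        if status not in ("created", "waiting"):
            return status  # e.g. "checked and ok", or an error status
        time.sleep(delay)
    raise TimeoutError("AWS Lambda log collection check did not complete")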

src/datadog_api_client/v1/api/events_api.py

Lines changed: 0 additions & 1 deletion
@@ -196,7 +196,6 @@ def list_events(

**Notes** :

-
*
If the event you’re querying contains markdown formatting of any kind,
you may see characters such as ``%`` , ``\\`` , ``n`` in your output.

src/datadog_api_client/v1/api/logs_api.py

Lines changed: 3 additions & 4 deletions
@@ -4,6 +4,7 @@
from __future__ import annotations

from typing import Any, Dict, Union
+import warnings

from datadog_api_client.api_client import ApiClient, Endpoint as _Endpoint
from datadog_api_client.model_utils import (

@@ -158,18 +159,16 @@ def submit_log(
content_encoding: Union[ContentEncoding, UnsetType] = unset,
ddtags: Union[str, UnsetType] = unset,
) -> dict:
-"""Send logs.
+"""Send logs. **Deprecated**.

Send your logs to your Datadog platform over HTTP. Limits per HTTP request are:

-
* Maximum content size per payload (uncompressed): 5MB
* Maximum size for a single log: 1MB
* Maximum array size if sending multiple logs in an array: 1000 entries

Any log exceeding 1MB is accepted and truncated by Datadog:

-
* For a single log request, the API truncates the log at 1MB and returns a 2xx.
* For a multi-logs request, the API processes all logs, truncates only logs larger than 1MB, and returns a 2xx.

@@ -178,7 +177,6 @@ def submit_log(

The status codes answered by the HTTP API are:

-
* 200: OK
* 400: Bad request (likely an issue in the payload formatting)
* 403: Permission issue (likely using an invalid API Key)

@@ -202,4 +200,5 @@ def submit_log(

kwargs["body"] = body

+warnings.warn("submit_log is deprecated", DeprecationWarning, stacklevel=2)
return self._submit_log_endpoint.call_with_http_info(**kwargs)
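Regenerated output: submit_log now carries the **Deprecated** marker in its docstring and warns before issuing the request. A small sketch of silencing exactly this warning while calling code is migrated off the deprecated endpoint, using the message string emitted above; the stand-in function below only reproduces the warning so the snippet runs without a configured client:

import warnings

def submit_log_stand_in():
    # Mimics the warning the regenerated submit_log now emits.
    warnings.warn("submit_log is deprecated", DeprecationWarning, stacklevel=2)

with warnings.catch_warnings():
    # Ignore only this specific deprecation; other DeprecationWarnings still surface.
    warnings.filterwarnings("ignore", message="submit_log is deprecated", category=DeprecationWarning)
    submit_log_stand_in()  # no warning is shown here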

src/datadog_api_client/v1/api/logs_pipelines_api.py

Lines changed: 0 additions & 1 deletion
@@ -16,7 +16,6 @@ class LogsPipelinesApi:
Pipelines and processors operate on incoming logs, parsing
and transforming them into structured attributes for easier querying.

-
*
See the `pipelines configuration page <https://app.datadoghq.com/logs/pipelines>`_
for a list of the pipelines and processors currently configured in web UI.

src/datadog_api_client/v1/api/metrics_api.py

Lines changed: 0 additions & 2 deletions
@@ -25,7 +25,6 @@ class MetricsApi:
"""
The metrics endpoint allows you to:

-
* Post metrics data so it can be graphed on Datadog’s dashboards
* Query metrics from any time period
* Modify tag configurations for metrics

@@ -374,7 +373,6 @@ def submit_metrics(

If you’re submitting metrics directly to the Datadog API without using DogStatsD, expect:

-
* 64 bits for the timestamp
* 64 bits for the value
* 40 bytes for the metric names
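The figures in the submit_metrics docstring give a rough per-point cost. A quick back-of-the-envelope calculation from those numbers only; tags and JSON framing are not included, so this is a lower bound:

# 64-bit timestamp + 64-bit value + ~40 bytes of metric name, per the docstring above.
timestamp_bytes = 64 // 8
value_bytes = 64 // 8
metric_name_bytes = 40
per_point = timestamp_bytes + value_bytes + metric_name_bytes
print(per_point)  # 56 bytes per uncompressed point, before tags and payload overhead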

src/datadog_api_client/v1/api/monitors_api.py

Lines changed: 0 additions & 16 deletions
@@ -382,7 +382,6 @@ def create_monitor(

The type of monitor chosen from:

-
* anomaly: ``query alert``
* APM: ``query alert`` or ``trace-analytics alert``
* composite: ``composite``

@@ -410,7 +409,6 @@ def create_monitor(

Example: ``time_aggr(time_window):space_aggr:metric{tags} [by {key}] operator #``

-
* ``time_aggr`` : avg, sum, max, min, change, or pct_change
* ``time_window`` : ``last_#m`` (with ``#`` between 1 and 10080 depending on the monitor type) or ``last_#h`` (with ``#`` between 1 and 168 depending on the monitor type) or ``last_1d`` , or ``last_1w``
* ``space_aggr`` : avg, sum, min, or max

@@ -422,7 +420,6 @@ def create_monitor(
If you are using the ``_change_`` or ``_pct_change_`` time aggregator, instead use ``change_aggr(time_aggr(time_window),
timeshift):space_aggr:metric{tags} [by {key}] operator #`` with:

-
* ``change_aggr`` change, pct_change
* ``time_aggr`` avg, sum, max, min `Learn more <https://docs.datadoghq.com/monitors/create/types/#define-the-conditions>`_
* ``time_window`` last_#m (between 1 and 2880 depending on the monitor type), last_#h (between 1 and 48 depending on the monitor type), or last_#d (1 or 2)

@@ -435,7 +432,6 @@ def create_monitor(

Example: ``"check".over(tags).last(count).by(group).count_by_status()``

-
* ``check`` name of the check, for example ``datadog.agent.up``
* ``tags`` one or more quoted tags (comma-separated), or "*". for example: ``.over("env:prod", "role:db")`` ; ``over`` cannot be blank.
* ``count`` must be at greater than or equal to your max threshold (defined in the ``options`` ). It is limited to 100.

@@ -447,7 +443,6 @@ def create_monitor(

Example: ``events('sources:nagios status:error,warning priority:normal tags: "string query"').rollup("count").last("1h")"``

-
* ``event`` , the event query string:
* ``string_query`` free text query to match against event title and text.
* ``sources`` event sources (comma-separated).

@@ -465,7 +460,6 @@ def create_monitor(

Example: ``events(query).rollup(rollup_method[, measure]).last(time_window) operator #``

-
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.

@@ -477,7 +471,6 @@ def create_monitor(

Example: ``processes(search).over(tags).rollup('count').last(timeframe) operator #``

-
* ``search`` free text search string for querying processes.
Matching processes match results on the `Live Processes <https://docs.datadoghq.com/infrastructure/process/?tab=linuxwindows>`_ page.
* ``tags`` one or more tags (comma-separated)

@@ -489,7 +482,6 @@ def create_monitor(

Example: ``logs(query).index(index_name).rollup(rollup_method[, measure]).last(time_window) operator #``

-
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``index_name`` For multi-index organizations, the log index in which the request is performed.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` and ``cardinality``.

@@ -502,7 +494,6 @@ def create_monitor(

Example: ``12345 && 67890`` , where ``12345`` and ``67890`` are the IDs of non-composite monitors

-
* ``name`` [ *required* , *default* = **dynamic, based on query** ]: The name of the alert.
* ``message`` [ *required* , *default* = **dynamic, based on query** ]: A message to include with notifications for this monitor.
Email notifications can be sent to specific users by using the same '@username' notation as events.

@@ -514,7 +505,6 @@ def create_monitor(

Example: ``error_budget("slo_id").over("time_window") operator #``

-
* ``slo_id`` : The alphanumeric SLO ID of the SLO you are configuring the alert for.
* `time_window`: The time window of the SLO target you wish to alert on. Valid options: ``7d`` , ``30d`` , ``90d``.
* ``operator`` : ``>=`` or ``>``

@@ -523,7 +513,6 @@ def create_monitor(

Example: ``audits(query).rollup(rollup_method[, measure]).last(time_window) operator #``

-
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.

@@ -537,7 +526,6 @@ def create_monitor(

Example: ``ci-pipelines(query).rollup(rollup_method[, measure]).last(time_window) operator #``

-
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` , and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.

@@ -551,7 +539,6 @@ def create_monitor(

Example: ``ci-tests(query).rollup(rollup_method[, measure]).last(time_window) operator #``

-
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` , and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.

@@ -566,7 +553,6 @@ def create_monitor(
Example(RUM): ``error-tracking-rum(query).rollup(rollup_method[, measure]).last(time_window) operator #``
Example(APM Traces): ``error-tracking-traces(query).rollup(rollup_method[, measure]).last(time_window) operator #``

-
* ``query`` The search query - following the `Log search syntax <https://docs.datadoghq.com/logs/search_syntax/>`_.
* ``rollup_method`` The stats roll-up method - supports ``count`` , ``avg`` , and ``cardinality``.
* ``measure`` For ``avg`` and cardinality ``rollup_method`` - specify the measure or the facet name you want to use.

@@ -719,7 +705,6 @@ def search_monitor_groups(
:type per_page: int, optional
:param sort: String for sort order, composed of field and sort order separate by a comma, for example ``name,asc``. Supported sort directions: ``asc`` , ``desc``. Supported fields:

-
* ``name``
* ``status``
* ``tags``

@@ -765,7 +750,6 @@ def search_monitors(
:type per_page: int, optional
:param sort: String for sort order, composed of field and sort order separate by a comma, for example ``name,asc``. Supported sort directions: ``asc`` , ``desc``. Supported fields:

-
* ``name``
* ``status``
* ``tags``
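The create_monitor docstring above defines the metric-monitor query grammar time_aggr(time_window):space_aggr:metric{tags} [by {key}] operator #. A concrete query assembled from that grammar; the metric, tag, and threshold values are illustrative and not from this commit:

# avg over the last 5 minutes, averaged across the scope, split per host, alerting above 0.9
query = "avg(last_5m):avg:system.cpu.user{env:prod} by {host} > 0.9"
print(query)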

src/datadog_api_client/v1/api/organizations_api.py

Lines changed: 0 additions & 1 deletion
@@ -275,7 +275,6 @@ def upload_idp_for_org(
There are a couple of options for updating the Identity Provider (IdP)
metadata from your SAML IdP.

-
*
**Multipart Form-Data** : Post the IdP metadata file using a form post.
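A hedged sketch of the multipart form upload mentioned above, assuming the generated upload_idp_for_org operation accepts the organization's public_id and an open idp_file handle; the parameter names, file path, and public_id value are assumptions for illustration:

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v1.api.organizations_api import OrganizationsApi

configuration = Configuration()  # reads keys from the environment
with ApiClient(configuration) as api_client:
    api = OrganizationsApi(api_client)
    with open("idp_metadata.xml", "rb") as idp_file:
        # Posts the SAML IdP metadata file as multipart form data.
        api.upload_idp_for_org(public_id="abc123", idp_file=idp_file)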
