Skip to content

Commit daa5089

Browse files
committed
checkpoint
1 parent 5f0ad54 commit daa5089

File tree

4 files changed

+138
-34
lines changed

4 files changed

+138
-34
lines changed

datadog_lambda/wrapper.py

Lines changed: 116 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -177,11 +177,13 @@ def __init__(self, func):
177177

178178
def __call__(self, event, context, **kwargs):
179179
"""Executes when the wrapped function gets called"""
180-
self._before(event, context)
180+
actual_event, custom_probes = self._extract_di_request(event)
181+
self._before(actual_event, context, custom_probes)
181182
try:
182183
if self.blocking_response:
183184
return self.blocking_response
184-
self.response = self.func(event, context, **kwargs)
185+
# Pass unwrapped event to user's handler
186+
self.response = self.func(actual_event, context, **kwargs)
185187
return self.response
186188
except BlockingException:
187189
self.blocking_response = get_asm_blocked_response(self.event_source)
@@ -194,10 +196,109 @@ def __call__(self, event, context, **kwargs):
194196
self.span.set_traceback()
195197
raise
196198
finally:
197-
self._after(event, context)
199+
self._after(actual_event, context, custom_probes)
198200
if self.blocking_response:
199201
return self.blocking_response
200202

203+
def _enable_dynamic_instrumentation(self):
204+
from ddtrace.debugging import DynamicInstrumentation
205+
DynamicInstrumentation.enable()
206+
logger.debug("DI framework enabled for this invocation")
207+
208+
def _disable_dynamic_instrumentation(self):
209+
try:
210+
from ddtrace.debugging import DynamicInstrumentation
211+
DynamicInstrumentation.disable()
212+
logger.debug("DI framework disabled after invocation")
213+
except Exception as e:
214+
logger.error(f"Failed to disable DI framework: {e}")
215+
216+
def _inject_probes(self, probe_definitions):
217+
if not probe_definitions:
218+
return
219+
220+
try:
221+
from ddtrace.debugging._debugger import Debugger
222+
from ddtrace.debugging._probe.remoteconfig import build_probe, ProbePollerEvent
223+
224+
# Get the debugger instance
225+
debugger = Debugger._instance
226+
if debugger is None:
227+
logger.error("Debugger instance not available, cannot inject probes")
228+
return
229+
230+
# Convert probe definitions to Probe objects
231+
probes = [build_probe(p) for p in probe_definitions]
232+
233+
# Track probe IDs for cleanup
234+
for probe in probes:
235+
self._active_probe_ids.append(probe.probe_id)
236+
237+
# Inject probes using the debugger's configuration method
238+
debugger._on_configuration(ProbePollerEvent.NEW_PROBES, probes)
239+
logger.debug(f"Injected {len(probes)} probes: {[p.probe_id for p in probes]}")
240+
241+
except Exception as e:
242+
logger.error(f"Failed to inject custom probes: {e}")
243+
import traceback
244+
logger.error(f"Traceback: {traceback.format_exc()}")
245+
246+
def _remove_probes(self):
247+
try:
248+
from ddtrace.debugging._debugger import Debugger
249+
from ddtrace.debugging._probe.remoteconfig import ProbePollerEvent
250+
251+
debugger = Debugger._instance
252+
if debugger is None:
253+
logger.debug("Debugger instance not available, cannot remove probes")
254+
return
255+
256+
probes_to_remove = []
257+
for probe_id in self._active_probe_ids:
258+
probe = debugger._probe_registry.get(probe_id)
259+
if probe is not None:
260+
probes_to_remove.append(probe)
261+
logger.debug(f"Marking probe for removal: {probe_id}")
262+
else:
263+
logger.debug(f"Probe {probe_id} not found in registry")
264+
if probes_to_remove:
265+
debugger._on_configuration(ProbePollerEvent.DELETED_PROBES, probes_to_remove)
266+
logger.debug(f"Removed {len(probes_to_remove)} probes")
267+
268+
self._active_probe_ids = []
269+
270+
except Exception as e:
271+
logger.error(f"Failed to remove probes: {e}")
272+
import traceback
273+
logger.error(f"Traceback: {traceback.format_exc()}")
274+
275+
def _extract_di_request(self, event):
276+
"""
277+
Check if event requests Dynamic Instrumentation and extract actual payload and probes.
278+
279+
Supported formats:
280+
1. Regular payload: {"key1": "value1"}
281+
-> (event, [])
282+
DI is NOT enabled
283+
284+
2. DI with custom probes: {"probes": [...], "payload": {...}}
285+
-> (unwrapped_payload, [probe_definitions])
286+
DI is enabled with custom probes, both disabled/removed after
287+
Note: Presence of "probes" implies enableDI=true
288+
289+
Returns: (actual_payload, probes)
290+
"""
291+
if isinstance(event, dict):
292+
has_probes = "probes" in event
293+
294+
if has_probes:
295+
probes = event.get("probes", None)
296+
actual_payload = event.get("payload", event)
297+
logger.debug(f"DI request detected: custom_probes={probes is not None}, probe_count={len(probes) if probes else 0}")
298+
return actual_payload, probes
299+
300+
return event, []
301+
201302
def _inject_authorizer_span_headers(self, request_id):
202303
reference_span = self.inferred_span if self.inferred_span else self.span
203304
assert reference_span.finished
@@ -226,12 +327,18 @@ def _inject_authorizer_span_headers(self, request_id):
226327
self.response.setdefault("context", {})
227328
self.response["context"]["_datadog"] = datadog_data
228329

229-
def _before(self, event, context):
330+
def _before(self, event, context, custom_probes=None):
230331
try:
231332
self.response = None
232333
self.blocking_response = None
334+
self._active_probe_ids = []
233335
set_cold_start(init_timestamp_ns)
234336

337+
if custom_probes:
338+
logger.debug(f"Injecting {len(custom_probes)} custom probes for this invocation")
339+
self._enable_dynamic_instrumentation()
340+
self._inject_probes(custom_probes)
341+
235342
if not should_use_extension:
236343
from datadog_lambda.metric import submit_invocations_metric
237344

@@ -289,8 +396,12 @@ def _before(self, event, context):
289396
except Exception as e:
290397
logger.error(format_err_with_traceback(e))
291398

292-
def _after(self, event, context):
399+
def _after(self, event, context, custom_probes=None):
293400
try:
401+
if custom_probes:
402+
self._remove_probes()
403+
self._disable_dynamic_instrumentation()
404+
294405
from datadog_lambda.metric import submit_batch_item_failures_metric
295406

296407
submit_batch_item_failures_metric(self.response, context)

scripts/publish_layers.sh

Lines changed: 13 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -42,18 +42,18 @@ LAYER_PATHS=(
4242
".layers/datadog_lambda_py-arm64-3.13.zip"
4343
)
4444
AVAILABLE_LAYERS=(
45-
"Datadog-Python38"
46-
"Datadog-Python38-ARM"
47-
"Datadog-Python39"
48-
"Datadog-Python39-ARM"
49-
"Datadog-Python310"
50-
"Datadog-Python310-ARM"
51-
"Datadog-Python311"
52-
"Datadog-Python311-ARM"
53-
"Datadog-Python312"
54-
"Datadog-Python312-ARM"
55-
"Datadog-Python313"
56-
"Datadog-Python313-ARM"
45+
"John-Datadog-Python38"
46+
"John-Datadog-Python38-ARM"
47+
"John-Datadog-Python39"
48+
"John-Datadog-Python39-ARM"
49+
"John-Datadog-Python310"
50+
"John-Datadog-Python310-ARM"
51+
"John-Datadog-Python311"
52+
"John-Datadog-Python311-ARM"
53+
"John-Datadog-Python312"
54+
"John-Datadog-Python312-ARM"
55+
"John-Datadog-Python313"
56+
"John-Datadog-Python313-ARM"
5757
)
5858
AVAILABLE_REGIONS=$(aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName')
5959

@@ -63,15 +63,6 @@ PIDS=()
6363
# Makes sure any subprocesses will be terminated with this process
6464
trap "pkill -P $$; exit 1;" INT
6565

66-
# Check that the layer files exist
67-
for layer_file in "${LAYER_PATHS[@]}"
68-
do
69-
if [ ! -f $layer_file ]; then
70-
echo "Could not find $layer_file."
71-
exit 1
72-
fi
73-
done
74-
7566
# Determine the target regions
7667
if [ -z "$REGIONS" ]; then
7768
echo "Region not specified, running for all available regions."
@@ -110,11 +101,7 @@ else
110101
echo "Layer version specified: $VERSION"
111102
fi
112103

113-
read -p "Ready to publish version $VERSION of layers ${LAYERS[*]} to regions ${REGIONS[*]} (y/n)?" CONT
114-
if [ "$CONT" != "y" ]; then
115-
echo "Exiting"
116-
exit 1
117-
fi
104+
echo "Publishing version $VERSION of layers ${LAYERS[*]} to regions ${REGIONS[*]}"
118105

119106
index_of_layer() {
120107
layer_name=$1

scripts/publish_sandbox.sh

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,12 @@
33
# Usage: VERSION=5 ./scripts/publish_sandbox.sh
44
set -e
55

6-
./scripts/build_layers.sh
7-
aws-vault exec sso-serverless-sandbox-account-admin -- ./scripts/sign_layers.sh sandbox
8-
aws-vault exec sso-serverless-sandbox-account-admin -- ./scripts/publish_layers.sh
6+
# Build only ARM64 layers for Python 3.12
7+
ARCH=arm64 PYTHON_VERSION=3.12 ./scripts/build_layers.sh
8+
# Signing is commented out for sandbox - not needed for internal testing
9+
## aws-vault exec sso-serverless-sandbox-account-admin -- ./scripts/sign_layers.sh sandbox
10+
# Publish to us-east-1 only
11+
LAYERS=John-Datadog-Python312-ARM VERSION=$VERSION REGIONS=us-east-1 aws-vault exec sso-serverless-sandbox-account-admin -- ./scripts/publish_layers.sh
912

1013
# Automatically create PR against github.com/DataDog/documentation
1114
# If you'd like to test, please uncomment the below line

venv/pyvenv.cfg

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
home = /Users/john.chrostek/.pyenv/versions/3.10.13/bin
2+
include-system-site-packages = false
3+
version = 3.10.13

0 commit comments

Comments
 (0)