Commit 25e29d5

Merge branch 'main' into rayz/otlp-use-stringbuilder
2 parents 2ea3ba0 + 8e261e7

8 files changed: +281 additions, -86 deletions
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
# Docs: https://datadoghq.atlassian.net/wiki/spaces/SECENG/pages/5138645099/User+guide+dd-octo-sts
issuer: https://token.actions.githubusercontent.com

subject: repo:DataDog/saluki:ref:refs/tags/*

claim_pattern:
  event_name: (push|workflow_dispatch)
  ref: refs/tags/[0-9]+\.[0-9]+\.[0-9]+
  ref_protected: "false"
  job_workflow_ref: DataDog/saluki/\.github/workflows/bump-adp-version\.yml@refs/tags/[0-9]+\.[0-9]+\.[0-9]+

permissions:
  contents: write
  pull_requests: write
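
The claim_pattern entries above are regular expressions matched against the claims of the workflow's OIDC token, so only bare MAJOR.MINOR.PATCH tag events reaching the pinned bump-adp-version workflow can mint a token. As a rough illustration of what the `ref` pattern accepts, here is a small Rust sketch using the regex crate; the full-string anchoring is an assumption about how dd-octo-sts evaluates patterns, not something this commit states:

use regex::Regex;

fn main() {
    // Same expression as the policy's `ref` claim, anchored here to
    // illustrate full-match behaviour (assumed, not confirmed by this commit).
    let tag_ref = Regex::new(r"^refs/tags/[0-9]+\.[0-9]+\.[0-9]+$").unwrap();

    for candidate in ["refs/tags/0.1.32", "refs/tags/v0.1.32", "refs/heads/main"] {
        println!("{candidate}: {}", tag_ref.is_match(candidate));
    }
    // Only "refs/tags/0.1.32" matches: no `v` prefix, no branch refs.
}
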
Lines changed: 167 additions & 0 deletions
@@ -0,0 +1,167 @@
name: Bump ADP Version

on:
  push:
    tags:
      - "[0-9]+.[0-9]+.[0-9]+"
  workflow_dispatch:

jobs:
  bump-version:
    runs-on: ubuntu-latest
    permissions:
      id-token: write
    steps:
      - name: Get access token from dd-octo-sts
        uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3
        id: octo-sts
        with:
          scope: DataDog/saluki
          policy: self.bump-adp-version.create-pr

      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          token: "${{ steps.octo-sts.outputs.token }}"
          ref: main

      - name: Configure Git
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Calculate new version
        id: version
        run: |
          CURRENT_VERSION=$(grep -E '^version = "' bin/agent-data-plane/Cargo.toml | head -n 1 | cut -d '"' -f 2)
          echo "Current version: $CURRENT_VERSION"

          MAJOR=$(echo $CURRENT_VERSION | cut -d '.' -f 1)
          MINOR=$(echo $CURRENT_VERSION | cut -d '.' -f 2)
          PATCH=$(echo $CURRENT_VERSION | cut -d '.' -f 3)
          NEW_PATCH=$((PATCH + 1))
          NEW_VERSION="${MAJOR}.${MINOR}.${NEW_PATCH}"

          echo "New version: $NEW_VERSION"
          echo "current_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT

      - name: Update version in Cargo.toml
        run: |
          CURRENT_VERSION="${{ steps.version.outputs.current_version }}"
          NEW_VERSION="${{ steps.version.outputs.new_version }}"

          sed -i "s/^version = \"$CURRENT_VERSION\"/version = \"$NEW_VERSION\"/" bin/agent-data-plane/Cargo.toml

          echo "Updated bin/agent-data-plane/Cargo.toml"

      - name: Update Cargo.lock
        run: cargo update -p agent-data-plane --quiet

      - name: Create remote branch
        run: |
          BRANCH_NAME=bump-adp-version-${{ steps.version.outputs.new_version }}
          git push origin HEAD:refs/heads/${BRANCH_NAME}

      - name: Commit Changes
        id: commit
        run: |
          git add bin/agent-data-plane/Cargo.toml Cargo.lock
          git commit -m "chore(agent-data-plane): bump version to ${{ steps.version.outputs.new_version }}"
          echo "commit_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT

      - name: Sign Commit
        uses: DataDog/commit-headless@5a0f3876e0fbdd3a86b3e008acf4ec562db59eee # v2.0.1
        with:
          token: "${{ steps.octo-sts.outputs.token }}"
          branch: bump-adp-version-${{ steps.version.outputs.new_version }}
          head-sha: ${{ github.sha }}
          command: push
          commits: "${{ steps.commit.outputs.commit_sha }}"

      - name: Create Pull Request
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: "${{ steps.octo-sts.outputs.token }}"
          script: |
            const branchName = `bump-adp-version-${{ steps.version.outputs.new_version }}`;
            const prTitle = `chore(agent-data-plane): bump version to ${{ steps.version.outputs.new_version }}`;
            const prBody = `## Summary

            This PR bumps the Agent Data Plane version to ${{ steps.version.outputs.new_version }} for the next development cycle.

            ### Details of change

            - **Previous version:** ${{ steps.version.outputs.current_version }}
            - **New version:** ${{ steps.version.outputs.new_version }}

            _This PR was automatically generated by the [Bump ADP Version](.github/workflows/bump-adp-version.yml) workflow._

            ## Change Type

            - [ ] Bug fix
            - [ ] New feature
            - [x] Non-functional (chore, refactoring, docs)
            - [ ] Performance`;
            const prLabel = 'ci/bump-adp-version';

            // Make sure we don't have an open PR for the exact same change.
            const { data: exactPRData } = await github.rest.search.issuesAndPullRequests({
              q: `repo:${context.repo.owner}/${context.repo.repo} is:pr is:open head:${branchName}`
            });

            const exactPRs = exactPRData.items;

            if (exactPRs.length > 0) {
              console.log(`Pull request for this version already exists: ${exactPRs[0].html_url}`);
              return;
            }

            // Create the new pull request.
            const updatePR = await github.rest.pulls.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: prTitle,
              head: branchName,
              base: 'main',
              body: prBody,
            });
            console.log(`Pull request created: ${updatePR.data.html_url}`);

            // Set the right labels on the PR.
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: updatePR.data.number,
              labels: [prLabel, 'automated']
            });

            // Look for other open PRs for bumping the ADP version, and close them out so this one takes precedence.
            const { data: labeledPRData } = await github.rest.search.issuesAndPullRequests({
              q: `repo:${context.repo.owner}/${context.repo.repo} is:pr is:open label:"${prLabel}"`
            });

            const obsoleteUpdatePRs = labeledPRData.items;

            // Close any existing PRs created from this workflow so that we only have the latest one.
            for (const obsoleteUpdatePR of obsoleteUpdatePRs) {
              // Don't close the PR we just opened.
              if (obsoleteUpdatePR.number == updatePR.data.number) {
                continue;
              }

              console.log(`Closing existing PR: ${obsoleteUpdatePR.html_url}`);
              await github.rest.pulls.update({
                owner: context.repo.owner,
                repo: context.repo.repo,
                pull_number: obsoleteUpdatePR.number,
                state: 'closed'
              });

              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: obsoleteUpdatePR.number,
                body: `Closing this pull request as a newer version bump is available: [update PR](https://github.com/${context.repo.owner}/${context.repo.repo}/pull/${updatePR.data.number})`
              });
            }
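
For clarity, the "Calculate new version" step above is a plain patch bump: it reads the first version = "..." line from bin/agent-data-plane/Cargo.toml, splits it on dots, and increments the last component. A minimal Rust sketch of the same arithmetic (the bump_patch helper below is hypothetical, written only to mirror the shell logic):

// Hypothetical helper mirroring the workflow's shell step: split on '.',
// increment the patch component, and reassemble MAJOR.MINOR.PATCH.
fn bump_patch(current: &str) -> Option<String> {
    let mut parts = current.splitn(3, '.');
    let major: u64 = parts.next()?.parse().ok()?;
    let minor: u64 = parts.next()?.parse().ok()?;
    let patch: u64 = parts.next()?.parse().ok()?;
    Some(format!("{major}.{minor}.{}", patch + 1))
}

fn main() {
    // Matches the Cargo.toml change in this commit: 0.1.31 -> 0.1.32.
    assert_eq!(bump_patch("0.1.31").as_deref(), Some("0.1.32"));
}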

Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default.

bin/agent-data-plane/Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [package]
 name = "agent-data-plane"
-version = "0.1.31"
+version = "0.1.32"
 edition = { workspace = true }
 license = { workspace = true }
 repository = { workspace = true }

lib/saluki-components/src/common/otlp/traces/translator.rs

Lines changed: 33 additions & 17 deletions
@@ -1,3 +1,4 @@
+use std::collections::hash_map::IntoIter;
 use std::num::NonZeroUsize;
 use std::sync::Arc;

@@ -6,7 +7,7 @@ use otlp_protos::opentelemetry::proto::resource::v1::Resource as OtlpResource;
 use otlp_protos::opentelemetry::proto::trace::v1::ResourceSpans;
 use saluki_common::collections::FastHashMap;
 use saluki_common::strings::StringBuilder;
-use saluki_context::tags::TagSet;
+use saluki_context::tags::{SharedTagSet, TagSet};
 use saluki_core::data_model::event::trace::{Span as DdSpan, Trace, TraceSampling};
 use saluki_core::data_model::event::Event;
 use stringtheory::interning::GenericMapInterner;
@@ -74,7 +75,7 @@ impl OtlpTracesTranslator {
         }
     }

-    pub fn translate_resource_spans(&mut self, resource_spans: ResourceSpans, metrics: &Metrics) -> Vec<Event> {
+    pub fn translate_spans(&mut self, resource_spans: ResourceSpans, metrics: &Metrics) -> impl Iterator<Item = Event> {
         let resource: OtlpResource = resource_spans.resource.unwrap_or_default();
         let ignore_missing_fields = self.config.ignore_missing_datadog_fields;
         let compute_top_level = self.config.enable_otlp_compute_top_level_by_span_kind;
@@ -120,23 +121,38 @@ impl OtlpTracesTranslator {
             }
         }

-        traces_by_id
-            .into_iter()
-            .filter_map(|(_, entry)| {
-                if entry.spans.is_empty() {
-                    None
-                } else {
-                    let mut trace = Trace::new(entry.spans, resource_tags.clone());
+        OtlpTraceEventsIter {
+            resource_tags,
+            entries: traces_by_id.into_iter(),
+        }
+    }
+}

-                    // Set the trace-level sampling priority if one was found
-                    if let Some(priority) = entry.priority {
-                        trace.set_sampling(Some(TraceSampling::new(false, Some(priority), None, None)));
-                    }
+struct OtlpTraceEventsIter {
+    resource_tags: SharedTagSet,
+    entries: IntoIter<u64, TraceEntry>,
+}

-                    Some(Event::Trace(trace))
-                }
-            })
-            .collect()
+impl Iterator for OtlpTraceEventsIter {
+    type Item = Event;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        for (_, entry) in self.entries.by_ref() {
+            if entry.spans.is_empty() {
+                continue;
+            }
+
+            let mut trace = Trace::new(entry.spans, self.resource_tags.clone());
+
+            // Set the trace-level sampling priority if one was found
+            if let Some(priority) = entry.priority {
+                trace.set_sampling(Some(TraceSampling::new(false, Some(priority), None, None)));
+            }
+
+            return Some(Event::Trace(trace));
+        }
+
+        None
     }
 }
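
The refactor above swaps an eager filter_map(...).collect() into a Vec<Event> for a hand-rolled iterator that owns the drained per-trace map plus the shared resource tags, so the decoder can pull trace events one at a time without building an intermediate Vec. A stripped-down sketch of the same pattern with placeholder types (the real TraceEntry, Trace, and Event types live in saluki; everything below is illustrative):

use std::collections::hash_map::IntoIter;
use std::collections::HashMap;

// Placeholder stand-in for saluki's TraceEntry.
struct Entry {
    spans: Vec<&'static str>,
}

struct EventsIter {
    entries: IntoIter<u64, Entry>,
}

impl Iterator for EventsIter {
    type Item = Vec<&'static str>;

    fn next(&mut self) -> Option<Self::Item> {
        // Same shape as OtlpTraceEventsIter::next: skip empty entries,
        // yield one "event" per non-empty entry.
        for (_, entry) in self.entries.by_ref() {
            if !entry.spans.is_empty() {
                return Some(entry.spans);
            }
        }
        None
    }
}

fn main() {
    let mut by_id = HashMap::new();
    by_id.insert(1u64, Entry { spans: vec!["a", "b"] });
    by_id.insert(2u64, Entry { spans: vec![] });

    let events: Vec<_> = EventsIter { entries: by_id.into_iter() }.collect();
    assert_eq!(events.len(), 1); // the empty entry is filtered out lazily
}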

lib/saluki-components/src/decoders/otlp/mod.rs

Lines changed: 1 addition & 2 deletions
@@ -135,8 +135,7 @@ impl Decoder for OtlpDecoder {
         };

         for resource_spans in request.resource_spans {
-            let trace_events = traces_translator.translate_resource_spans(resource_spans, &metrics);
-            for trace_event in trace_events {
+            for trace_event in traces_translator.translate_spans(resource_spans, &metrics) {
                 if let Some(event_buffer) = event_buffer_manager.try_push(trace_event) {
                     if let Err(e) = context.dispatcher().dispatch(event_buffer).await {
                         error!(error = %e, "Failed to dispatch trace events.");

lib/saluki-components/src/sources/otlp/metrics/translator.rs

Lines changed: 5 additions & 4 deletions
@@ -3,6 +3,7 @@
 use std::collections::HashSet;
 use std::sync::LazyLock;
 use std::time::{SystemTime, UNIX_EPOCH};
+use std::vec::IntoIter;

 use otlp_protos::opentelemetry::proto::common::v1::KeyValue as OtlpKeyValue;
 use otlp_protos::opentelemetry::proto::metrics::v1::{
@@ -96,11 +97,11 @@ impl OtlpMetricsTranslator {
         }
     }

-    /// Translates a batch of OTLP `ResourceMetrics` into a collection of Saluki `Event`s.
+    /// Translates a batch of OTLP `ResourceMetrics` into Saluki `Event`s.
     /// This is the Rust equivalent of the Go `MapMetrics` function.
-    pub fn map_metrics(
+    pub fn translate_metrics(
         &mut self, resource_metrics: OtlpResourceMetrics, metrics: &Metrics,
-    ) -> Result<Vec<Event>, GenericError> {
+    ) -> Result<IntoIter<Event>, GenericError> {
         let mut events = Vec::new();
         let resource = resource_metrics.resource.unwrap_or_default();
         let source = self.attribute_translator.resource_to_source(&resource);
@@ -204,7 +205,7 @@

         metrics.metrics_received().increment(events.len() as u64);

-        Ok(events)
+        Ok(events.into_iter())
     }

     /// Creates a new `OtlpMetricsTranslator` for tests.
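
The metrics side keeps building a Vec<Event> internally but now returns std::vec::IntoIter<Event>, so both translators hand their callers an iterator (the trace path via impl Iterator, the metrics path via Vec::into_iter()). A small sketch of that API shape with simplified types (not saluki's actual signatures):

use std::vec::IntoIter;

// Simplified stand-in for translate_metrics: build the collection eagerly,
// then return an owning iterator instead of the Vec itself.
fn translate(values: &[u32]) -> IntoIter<u32> {
    let events: Vec<u32> = values.iter().map(|v| v * 2).collect();
    events.into_iter()
}

fn main() {
    // Callers loop over the result directly, just like the trace path.
    for event in translate(&[1, 2, 3]) {
        println!("event: {event}");
    }
}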
