Skip to content

Commit c5344e4

Browse files
Merge branch 'main' into modularize-alerts
2 parents 556c37e + 8c8e86b commit c5344e4

File tree

19 files changed

+633
-278
lines changed

19 files changed

+633
-278
lines changed

Cargo.lock

Lines changed: 89 additions & 27 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 9 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,7 @@ actix-web-prometheus = { version = "0.1" }
3333
actix-web-static-files = "4.0"
3434
http = "0.2.7"
3535
http-auth-basic = "0.3.3"
36-
tonic = { version = "0.12.3", features = ["tls", "transport", "gzip", "zstd"] }
36+
tonic = { version = "0.12.3", features = ["tls", "transport", "gzip", "zstd", "prost"] }
3737
tonic-web = "0.12.3"
3838
tower-http = { version = "0.6.1", features = ["cors"] }
3939
url = "2.4.0"
@@ -76,7 +76,13 @@ tokio-stream = { version = "0.1", features = ["fs"] }
7676
tokio-util = { version = "0.7" }
7777

7878
# Logging and Metrics
79-
opentelemetry-proto = { git = "https://github.com/parseablehq/opentelemetry-rust", branch = "fix-metrics-u64-serialization" }
79+
opentelemetry-proto = { version = "0.30.0", features = [
80+
"gen-tonic",
81+
"with-serde",
82+
"logs",
83+
"metrics",
84+
"trace",
85+
] }
8086
prometheus = { version = "0.13", features = ["process"] }
8187
prometheus-parse = "0.2.5"
8288
tracing = "0.1"
@@ -133,6 +139,7 @@ xxhash-rust = { version = "0.8", features = ["xxh3"] }
133139
futures-core = "0.3.31"
134140
tempfile = "3.20.0"
135141
lazy_static = "1.4.0"
142+
prost = "0.13.1"
136143

137144
[build-dependencies]
138145
cargo_toml = "0.21"

src/connectors/kafka/processor.rs

Lines changed: 10 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -16,24 +16,24 @@
1616
*
1717
*/
1818

19-
use async_trait::async_trait;
20-
use futures_util::StreamExt;
21-
use rdkafka::consumer::{CommitMode, Consumer};
22-
use serde_json::Value;
23-
use std::collections::HashMap;
24-
use std::sync::Arc;
25-
use tokio_stream::wrappers::ReceiverStream;
26-
use tracing::{debug, error};
27-
2819
use crate::{
2920
connectors::common::processor::Processor,
3021
event::{
3122
Event as ParseableEvent, USER_AGENT_KEY,
3223
format::{EventFormat, LogSourceEntry, json},
3324
},
25+
handlers::TelemetryType,
3426
parseable::PARSEABLE,
3527
storage::StreamType,
3628
};
29+
use async_trait::async_trait;
30+
use futures_util::StreamExt;
31+
use rdkafka::consumer::{CommitMode, Consumer};
32+
use serde_json::Value;
33+
use std::collections::HashMap;
34+
use std::sync::Arc;
35+
use tokio_stream::wrappers::ReceiverStream;
36+
use tracing::{debug, error};
3737

3838
use super::{ConsumerRecord, StreamConsumer, TopicPartition, config::BufferConfig};
3939

@@ -50,13 +50,13 @@ impl ParseableSinkProcessor {
5050
.map(|r| r.topic.as_str())
5151
.unwrap_or_default();
5252
let log_source_entry = LogSourceEntry::default();
53-
5453
PARSEABLE
5554
.create_stream_if_not_exists(
5655
stream_name,
5756
StreamType::UserDefined,
5857
None,
5958
vec![log_source_entry],
59+
TelemetryType::default(),
6060
)
6161
.await?;
6262

0 commit comments

Comments (0)