Skip to content

Commit ddef8ee

Browse files
parmesant, nikhilsinha, de-sh
authored
fix: build fix for Kafka (#1079)
Signed-off-by: parmesant <[email protected]> Co-authored-by: Nikhil Sinha <[email protected]> Co-authored-by: Devdutt Shenoi <[email protected]>
1 parent 8a9448d commit ddef8ee

File tree

6 files changed

+32
-16
lines changed

6 files changed

+32
-16
lines changed

Cargo.toml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,6 @@ once_cell = "1.17.1"
6767
opentelemetry-proto = {git = "https://github.com/parseablehq/opentelemetry-rust", branch="fix-metrics-u64-serialization"}
6868
prometheus = { version = "0.13", features = ["process"] }
6969
rand = "0.8.5"
70-
rdkafka = { version = "0.36.2", default-features = false, features = ["tokio"] }
7170
regex = "1.7.3"
7271
relative-path = { version = "1.7", features = ["serde"] }
7372
reqwest = { version = "0.11.27", default-features = false, features = [
@@ -136,3 +135,7 @@ debug = []
136135
inherits = "release"
137136
lto = "fat"
138137
codegen-units = 1
138+
139+
# adding rdkafka here because, for unsupported platforms, cargo skips other deps which come after this
140+
[target.'cfg(all(target_os = "linux", target_arch = "x86_64"))'.dependencies]
141+
rdkafka = { version = "0.36.2", default-features = false, features = ["tokio"] }

src/cli.rs

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -22,11 +22,16 @@ use std::path::PathBuf;
2222
use url::Url;
2323

2424
use crate::{
25-
kafka::SslProtocol,
2625
oidc::{self, OpenidConfig},
2726
option::{validation, Compression, Mode},
2827
};
2928

29+
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
30+
use crate::kafka::SslProtocol as KafkaSslProtocol;
31+
32+
#[cfg(not(all(target_os = "linux", target_arch = "x86_64")))]
33+
use std::string::String as KafkaSslProtocol;
34+
3035
#[derive(Debug, Default)]
3136
pub struct Cli {
3237
/// The location of TLS Cert file
@@ -107,7 +112,7 @@ pub struct Cli {
107112
pub kafka_host: Option<String>,
108113
pub kafka_group: Option<String>,
109114
pub kafka_client_id: Option<String>,
110-
pub kafka_security_protocol: Option<SslProtocol>,
115+
pub kafka_security_protocol: Option<KafkaSslProtocol>,
111116
pub kafka_partitions: Option<String>,
112117

113118
// Audit Logging env vars
@@ -502,16 +507,17 @@ impl FromArgMatches for Cli {
502507
}
503508

504509
fn update_from_arg_matches(&mut self, m: &clap::ArgMatches) -> Result<(), clap::Error> {
505-
self.kafka_topics = m.get_one::<String>(Self::KAFKA_TOPICS).cloned();
506-
self.kafka_security_protocol = m
507-
.get_one::<SslProtocol>(Self::KAFKA_SECURITY_PROTOCOL)
508-
.cloned();
509-
self.kafka_group = m.get_one::<String>(Self::KAFKA_GROUP).cloned();
510-
self.kafka_client_id = m.get_one::<String>(Self::KAFKA_CLIENT_ID).cloned();
511-
self.kafka_security_protocol = m
512-
.get_one::<SslProtocol>(Self::KAFKA_SECURITY_PROTOCOL)
513-
.cloned();
514-
self.kafka_partitions = m.get_one::<String>(Self::KAFKA_PARTITIONS).cloned();
510+
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
511+
{
512+
self.kafka_topics = m.get_one::<String>(Self::KAFKA_TOPICS).cloned();
513+
self.kafka_security_protocol = m
514+
.get_one::<KafkaSslProtocol>(Self::KAFKA_SECURITY_PROTOCOL)
515+
.cloned();
516+
self.kafka_group = m.get_one::<String>(Self::KAFKA_GROUP).cloned();
517+
self.kafka_client_id = m.get_one::<String>(Self::KAFKA_CLIENT_ID).cloned();
518+
self.kafka_host = m.get_one::<String>(Self::KAFKA_HOST).cloned();
519+
self.kafka_partitions = m.get_one::<String>(Self::KAFKA_PARTITIONS).cloned();
520+
}
515521

516522
self.audit_logger = m.get_one::<Url>(Self::AUDIT_LOGGER).cloned();
517523
self.audit_username = m.get_one::<String>(Self::AUDIT_USERNAME).cloned();

src/handlers/http/health_check.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ pub async fn handle_signals(shutdown_signal: Arc<Mutex<Option<oneshot::Sender<()
5858
{
5959
tokio::select! {
6060
_ = ctrl_c() => {
61-
log::info!("Received SIGINT signal at Readiness Probe Handler");
61+
info!("Received SIGINT signal at Readiness Probe Handler");
6262
shutdown(shutdown_signal).await;
6363
}
6464
}

src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ pub mod correlation;
2727
mod event;
2828
pub mod handlers;
2929
pub mod hottier;
30+
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
3031
pub mod kafka;
3132
mod livetail;
3233
mod metadata;

src/main.rs

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,12 +17,15 @@
1717
*/
1818

1919
use parseable::{
20-
banner, kafka,
20+
banner,
2121
option::{Mode, CONFIG},
2222
rbac, storage, IngestServer, ParseableServer, QueryServer, Server,
2323
};
2424
use tracing_subscriber::EnvFilter;
2525

26+
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
27+
use parseable::kafka;
28+
2629
#[actix_web::main]
2730
async fn main() -> anyhow::Result<()> {
2831
tracing_subscriber::fmt()
@@ -46,6 +49,7 @@ async fn main() -> anyhow::Result<()> {
4649
// keep metadata info in mem
4750
metadata.set_global();
4851

52+
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
4953
// load kafka server
5054
if CONFIG.parseable.mode != Mode::Query {
5155
tokio::task::spawn(kafka::setup_integration());

src/query/stream_schema_provider.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -301,7 +301,9 @@ impl StandardTableProvider {
301301
#[cfg(windows)]
302302
{
303303
if CONFIG.storage_name.eq("drive") {
304-
file_path = object_store::path::Path::from_absolute_path(file_path).unwrap();
304+
file_path = object_store::path::Path::from_absolute_path(file_path)
305+
.unwrap()
306+
.to_string();
305307
}
306308
}
307309
let pf = PartitionedFile::new(file_path, file.file_size);

0 commit comments

Comments (0)