
Commit 9b21a13

refactor: readability
1 parent 5f9ec0c commit 9b21a13

File tree

src/kafka.rs
src/main.rs

2 files changed: +54 −54 lines changed

src/kafka.rs

Lines changed: 52 additions & 52 deletions
@@ -167,67 +167,67 @@ fn resolve_schema(stream_name: &str) -> Result<HashMap<String, Arc<Field>>, KafkaError> {
 }
 
 async fn ingest_message<'a>(stream_name: &str, msg: BorrowedMessage<'a>) -> Result<(), KafkaError> {
-    if let Some(payload) = msg.payload() {
-        // stream should get created only if there is an incoming event, not before that
-        create_stream_if_not_exists(stream_name, &StreamType::UserDefined.to_string()).await?;
-
-        let schema = resolve_schema(stream_name)?;
-        let event = format::json::Event {
-            data: serde_json::from_slice(payload)?,
-            tags: String::default(),
-            metadata: String::default(),
-        };
-
-        let time_partition = STREAM_INFO.get_time_partition(stream_name)?;
-        let static_schema_flag = STREAM_INFO.get_static_schema_flag(stream_name)?;
-
-        let (rb, is_first) = event
-            .into_recordbatch(schema, static_schema_flag, time_partition)
-            .map_err(|err| KafkaError::PostError(PostError::CustomError(err.to_string())))?;
-
-        event::Event {
-            rb,
-            stream_name: stream_name.to_string(),
-            origin_format: "json",
-            origin_size: payload.len() as u64,
-            is_first_event: is_first,
-            parsed_timestamp: Utc::now().naive_utc(),
-            time_partition: None,
-            custom_partition_values: HashMap::new(),
-            stream_type: StreamType::UserDefined,
-        }
-        .process()
-        .await?;
-    } else {
+    let Some(payload) = msg.payload() else {
         debug!("{} No payload for stream", stream_name);
+        return Ok(());
+    };
+
+    // stream should get created only if there is an incoming event, not before that
+    create_stream_if_not_exists(stream_name, &StreamType::UserDefined.to_string()).await?;
+
+    let schema = resolve_schema(stream_name)?;
+    let event = format::json::Event {
+        data: serde_json::from_slice(payload)?,
+        tags: String::default(),
+        metadata: String::default(),
+    };
+
+    let time_partition = STREAM_INFO.get_time_partition(stream_name)?;
+    let static_schema_flag = STREAM_INFO.get_static_schema_flag(stream_name)?;
+
+    let (rb, is_first) = event
+        .into_recordbatch(schema, static_schema_flag, time_partition)
+        .map_err(|err| KafkaError::PostError(PostError::CustomError(err.to_string())))?;
+
+    event::Event {
+        rb,
+        stream_name: stream_name.to_string(),
+        origin_format: "json",
+        origin_size: payload.len() as u64,
+        is_first_event: is_first,
+        parsed_timestamp: Utc::now().naive_utc(),
+        time_partition: None,
+        custom_partition_values: HashMap::new(),
+        stream_type: StreamType::UserDefined,
     }
+    .process()
+    .await?;
+
     Ok(())
 }
 
 pub async fn setup_integration() {
-    tokio::task::spawn(async move {
-        let (consumer, stream_name) = match setup_consumer() {
-            Ok(c) => c,
-            Err(err) => {
-                match err {
-                    KafkaError::DoNotPrintError => {
-                        debug!("P_KAFKA_TOPIC not set, skipping kafka integration");
-                    }
-                    _ => {
-                        error!("{err}");
-                    }
+    let (consumer, stream_name) = match setup_consumer() {
+        Ok(c) => c,
+        Err(err) => {
+            match err {
+                KafkaError::DoNotPrintError => {
+                    debug!("P_KAFKA_TOPIC not set, skipping kafka integration");
+                }
+                _ => {
+                    error!("{err}");
                 }
-                return;
             }
-        };
+            return;
+        }
+    };
 
-        info!("Setup kafka integration for {stream_name}");
-        let mut stream = consumer.stream();
+    info!("Setup kafka integration for {stream_name}");
+    let mut stream = consumer.stream();
 
-        while let Ok(curr) = stream.next().await.unwrap() {
-            if let Err(err) = ingest_message(&stream_name, curr).await {
-                error!("Unable to ingest incoming kafka message- {err}")
-            }
+    while let Ok(curr) = stream.next().await.unwrap() {
+        if let Err(err) = ingest_message(&stream_name, curr).await {
+            error!("Unable to ingest incoming kafka message- {err}")
         }
-    });
+    }
 }
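The kafka.rs change is the `let ... else` early-return pattern (stable in Rust since 1.65): the empty-payload case exits first, and the happy path loses a level of nesting instead of living inside `if let Some(...)`. A minimal standalone sketch of the pattern — `Message` and `handle` are hypothetical stand-ins for `BorrowedMessage` and `ingest_message`, not items from this repository:

// A minimal sketch of the let-else early-return pattern used above.
// `Message` and `handle` are hypothetical stand-ins, not repo code.
struct Message {
    payload: Option<Vec<u8>>,
}

fn handle(msg: &Message) -> Result<(), String> {
    // Bind the payload, or bail out early when there is none.
    let Some(payload) = msg.payload.as_deref() else {
        eprintln!("no payload, skipping");
        return Ok(());
    };

    // The happy path now sits at the top indentation level.
    println!("got {} bytes", payload.len());
    Ok(())
}

fn main() {
    handle(&Message { payload: Some(b"hello".to_vec()) }).unwrap();
    handle(&Message { payload: None }).unwrap();
}

The observable behavior is unchanged; the refactor only removes the `} else {` arm that previously held the debug log, and drops the internal `tokio::task::spawn` wrapper from `setup_integration` (picked up by the caller in main.rs below).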

src/main.rs

Lines changed: 2 additions & 2 deletions
@@ -47,8 +47,8 @@ async fn main() -> anyhow::Result<()> {
     metadata.set_global();
 
     // load kafka server
-    if CONFIG.parseable.mode.ne(&Mode::Query) {
-        kafka::setup_integration().await;
+    if CONFIG.parseable.mode != Mode::Query {
+        tokio::task::spawn(kafka::setup_integration());
     }
 
     server.init().await?;
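Two things change in main.rs. First, `mode.ne(&Mode::Query)` becomes `mode != Mode::Query`, which is the same comparison (`!=` desugars to `PartialEq::ne`) in more idiomatic form. Second, since `setup_integration` no longer spawns its own task internally, the caller now backgrounds it with `tokio::task::spawn(...)`. A minimal sketch of that inversion, assuming a Tokio runtime; `consume_forever` is a made-up stand-in for `kafka::setup_integration`:

// Sketch: spawning an async fn at the call site instead of inside it.
// `consume_forever` is a hypothetical stand-in for kafka::setup_integration.
use std::time::Duration;

async fn consume_forever() {
    // A plain async fn: it spawns nothing itself, so the caller
    // decides whether it blocks or runs in the background.
    loop {
        tokio::time::sleep(Duration::from_secs(1)).await;
        println!("polled consumer");
    }
}

#[tokio::main]
async fn main() {
    // Background the consumer; main continues immediately.
    tokio::task::spawn(consume_forever());

    // Stand-in for server.init().await? in the real main.
    tokio::time::sleep(Duration::from_secs(3)).await;
}

The net effect is the same as before (the old `setup_integration` spawned internally and returned at once), but the backgrounding is now visible at the call site instead of being hidden inside the helper.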
