@@ -167,67 +167,67 @@ fn resolve_schema(stream_name: &str) -> Result<HashMap<String, Arc<Field>>, Kafk
 }
 
 async fn ingest_message<'a>(stream_name: &str, msg: BorrowedMessage<'a>) -> Result<(), KafkaError> {
-    if let Some(payload) = msg.payload() {
-        // stream should get created only if there is an incoming event, not before that
-        create_stream_if_not_exists(stream_name, &StreamType::UserDefined.to_string()).await?;
-
-        let schema = resolve_schema(stream_name)?;
-        let event = format::json::Event {
-            data: serde_json::from_slice(payload)?,
-            tags: String::default(),
-            metadata: String::default(),
-        };
-
-        let time_partition = STREAM_INFO.get_time_partition(stream_name)?;
-        let static_schema_flag = STREAM_INFO.get_static_schema_flag(stream_name)?;
-
-        let (rb, is_first) = event
-            .into_recordbatch(schema, static_schema_flag, time_partition)
-            .map_err(|err| KafkaError::PostError(PostError::CustomError(err.to_string())))?;
-
-        event::Event {
-            rb,
-            stream_name: stream_name.to_string(),
-            origin_format: "json",
-            origin_size: payload.len() as u64,
-            is_first_event: is_first,
-            parsed_timestamp: Utc::now().naive_utc(),
-            time_partition: None,
-            custom_partition_values: HashMap::new(),
-            stream_type: StreamType::UserDefined,
-        }
-        .process()
-        .await?;
-    } else {
+    let Some(payload) = msg.payload() else {
         debug!("{} No payload for stream", stream_name);
+        return Ok(());
+    };
+
+    // stream should get created only if there is an incoming event, not before that
+    create_stream_if_not_exists(stream_name, &StreamType::UserDefined.to_string()).await?;
+
+    let schema = resolve_schema(stream_name)?;
+    let event = format::json::Event {
+        data: serde_json::from_slice(payload)?,
+        tags: String::default(),
+        metadata: String::default(),
+    };
+
+    let time_partition = STREAM_INFO.get_time_partition(stream_name)?;
+    let static_schema_flag = STREAM_INFO.get_static_schema_flag(stream_name)?;
+
+    let (rb, is_first) = event
+        .into_recordbatch(schema, static_schema_flag, time_partition)
+        .map_err(|err| KafkaError::PostError(PostError::CustomError(err.to_string())))?;
+
+    event::Event {
+        rb,
+        stream_name: stream_name.to_string(),
+        origin_format: "json",
+        origin_size: payload.len() as u64,
+        is_first_event: is_first,
+        parsed_timestamp: Utc::now().naive_utc(),
+        time_partition: None,
+        custom_partition_values: HashMap::new(),
+        stream_type: StreamType::UserDefined,
     }
+    .process()
+    .await?;
+
     Ok(())
 }
 
 pub async fn setup_integration() {
-    tokio::task::spawn(async move {
-        let (consumer, stream_name) = match setup_consumer() {
-            Ok(c) => c,
-            Err(err) => {
-                match err {
-                    KafkaError::DoNotPrintError => {
-                        debug!("P_KAFKA_TOPIC not set, skipping kafka integration");
-                    }
-                    _ => {
-                        error!("{err}")
-                    }
+    let (consumer, stream_name) = match setup_consumer() {
+        Ok(c) => c,
+        Err(err) => {
+            match err {
+                KafkaError::DoNotPrintError => {
+                    debug!("P_KAFKA_TOPIC not set, skipping kafka integration");
+                }
+                _ => {
+                    error!("{err}")
                 }
-                return;
             }
-        };
+            return;
+        }
+    };
 
-        info!("Setup kafka integration for {stream_name}");
-        let mut stream = consumer.stream();
+    info!("Setup kafka integration for {stream_name}");
+    let mut stream = consumer.stream();
 
-        while let Ok(curr) = stream.next().await.unwrap() {
-            if let Err(err) = ingest_message(&stream_name, curr).await {
-                error!("Unable to ingest incoming kafka message- {err}"),
-            }
+    while let Ok(curr) = stream.next().await.unwrap() {
+        if let Err(err) = ingest_message(&stream_name, curr).await {
+            error!("Unable to ingest incoming kafka message- {err}")
         }
-    });
+    }
 }
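
For reference, the change in `ingest_message` swaps a nested `if let Some(...) { ... } else { ... }` for Rust's `let ... else` early return, so the no-payload case exits immediately and the processing code stays at the top indentation level. Below is a minimal, self-contained sketch of that pattern only; the `Record` type and `handle` function are hypothetical names for illustration, not part of this codebase.

```rust
// Standalone sketch of the let-else early-return pattern used in ingest_message.
// `Record` and `handle` are made-up names for illustration only.
struct Record {
    payload: Option<Vec<u8>>,
}

fn handle(record: &Record) -> Result<(), String> {
    // Bail out early when there is no payload, instead of nesting the
    // whole processing block inside `if let Some(payload) = ...`.
    let Some(payload) = record.payload.as_deref() else {
        eprintln!("no payload, skipping record");
        return Ok(());
    };

    // From here on, `payload` is a plain `&[u8]` and the happy path
    // is not wrapped in an extra block.
    println!("processing {} bytes", payload.len());
    Ok(())
}

fn main() {
    handle(&Record { payload: Some(b"hello".to_vec()) }).unwrap();
    handle(&Record { payload: None }).unwrap();
}
```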