@@ -1348,21 +1348,38 @@ impl StreamManager {
         let partition_key = Self::calculate_partition_key(&event, &stream.partitioning);

         // Serialize event data
-        let serialized_data = match stream.serialization {
+        let serialized_data = match &stream.serialization {
             SerializationFormat::Json => serde_json::to_vec(&event)?,
+            #[cfg(feature = "streaming")]
+            SerializationFormat::MessagePack => {
+                rmp_serde::to_vec(&event).map_err(|e| HammerworkError::Streaming {
+                    message: format!("MessagePack serialization failed: {}", e),
+                })?
+            }
+            #[cfg(not(feature = "streaming"))]
             SerializationFormat::MessagePack => {
                 return Err(HammerworkError::Streaming {
-                    message: "MessagePack serialization not implemented".to_string(),
+                    message: "MessagePack serialization requires 'streaming' feature".to_string(),
                 });
             }
+            #[cfg(feature = "streaming")]
+            SerializationFormat::Avro { schema_registry_url } => {
+                Self::serialize_avro(&event, schema_registry_url)?
+            }
+            #[cfg(not(feature = "streaming"))]
             SerializationFormat::Avro { .. } => {
                 return Err(HammerworkError::Streaming {
-                    message: "Avro serialization not implemented".to_string(),
+                    message: "Avro serialization requires 'streaming' feature".to_string(),
                 });
             }
+            #[cfg(feature = "streaming")]
+            SerializationFormat::Protobuf { schema_definition } => {
+                Self::serialize_protobuf(&event, schema_definition)?
+            }
+            #[cfg(not(feature = "streaming"))]
             SerializationFormat::Protobuf { .. } => {
                 return Err(HammerworkError::Streaming {
-                    message: "Protobuf serialization not implemented".to_string(),
+                    message: "Protobuf serialization requires 'streaming' feature".to_string(),
                 });
             }
         };
@@ -1383,6 +1400,99 @@ impl StreamManager {
         })
     }

+    /// Serialize event data using Avro format
+    #[cfg(feature = "streaming")]
+    fn serialize_avro(
+        event: &JobLifecycleEvent,
+        _schema_registry_url: &str,
+    ) -> crate::Result<Vec<u8>> {
+        use apache_avro::{Schema, Writer};
+
+        // Define Avro schema for JobLifecycleEvent
+        let schema = Schema::parse_str(
+            r#"
+        {
+            "type": "record",
+            "name": "JobLifecycleEvent",
+            "fields": [
+                {"name": "job_id", "type": "string"},
+                {"name": "queue_name", "type": "string"},
+                {"name": "event_type", "type": "string"},
+                {"name": "priority", "type": "string"},
+                {"name": "timestamp", "type": "string"},
+                {"name": "metadata", "type": {"type": "map", "values": "string"}}
+            ]
+        }
+        "#,
+        )
+        .map_err(|e| HammerworkError::Streaming {
+            message: format!("Failed to parse Avro schema: {}", e),
+        })?;
+
+        let mut writer = Writer::new(&schema, Vec::new());
+
+        // Create Avro record
+        let mut record = apache_avro::types::Record::new(&schema).ok_or_else(|| {
+            HammerworkError::Streaming {
+                message: "Failed to create Avro record".to_string(),
+            }
+        })?;
+
+        record.put("job_id", event.job_id.to_string());
+        record.put("queue_name", event.queue_name.clone());
+        record.put("event_type", event.event_type.to_string());
+        record.put("priority", event.priority.to_string());
+        record.put("timestamp", event.timestamp.to_rfc3339());
+        record.put("metadata", event.metadata.clone());
+
+        writer.append(record).map_err(|e| HammerworkError::Streaming {
+            message: format!("Failed to append Avro record: {}", e),
+        })?;
+
+        writer.into_inner().map_err(|e| HammerworkError::Streaming {
+            message: format!("Failed to finalize Avro writer: {}", e),
+        })
+    }
+
+    /// Serialize event data using Protobuf format
+    #[cfg(feature = "streaming")]
+    fn serialize_protobuf(
+        event: &JobLifecycleEvent,
+        _schema_definition: &str,
+    ) -> crate::Result<Vec<u8>> {
+        use prost::Message;
+
+        // Define protobuf message structure
+        #[derive(prost::Message)]
+        struct JobLifecycleEventProto {
+            #[prost(string, tag = "1")]
+            job_id: String,
+            #[prost(string, tag = "2")]
+            queue_name: String,
+            #[prost(string, tag = "3")]
+            event_type: String,
+            #[prost(string, tag = "4")]
+            priority: String,
+            #[prost(string, tag = "5")]
+            timestamp: String,
+            #[prost(map = "string, string", tag = "6")]
+            metadata: std::collections::HashMap<String, String>,
+        }
+
+        let proto_event = JobLifecycleEventProto {
+            job_id: event.job_id.to_string(),
+            queue_name: event.queue_name.clone(),
+            event_type: event.event_type.to_string(),
+            priority: event.priority.to_string(),
+            timestamp: event.timestamp.to_rfc3339(),
+            metadata: event.metadata.clone(),
+        };
+
+        let mut buf = Vec::new();
+        proto_event
+            .encode(&mut buf)
+            .map_err(|e| HammerworkError::Streaming {
+                message: format!("Failed to encode Protobuf message: {}", e),
+            })?;
+
+        Ok(buf)
+    }
+
     /// Calculate partition key based on partitioning strategy
     fn calculate_partition_key(
         event: &JobLifecycleEvent,
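A hedged sketch of the consumer side of serialize_protobuf: EventProto below is a hypothetical trimmed view of the message that only declares tags 1 and 2, to show that prost decoding tolerates the higher-numbered fields the producer writes (unknown fields are skipped). The prost::Message encode/decode calls are the real crate API; the struct and its field subset are illustration-only assumptions.

use prost::Message;

// Hypothetical consumer-side view of the message: only tags 1 and 2.
// Tags 3-6 written by serialize_protobuf are skipped during decoding.
#[derive(Clone, PartialEq, prost::Message)]
struct EventProto {
    #[prost(string, tag = "1")]
    job_id: String,
    #[prost(string, tag = "2")]
    queue_name: String,
}

fn main() -> Result<(), prost::DecodeError> {
    let original = EventProto {
        job_id: "job-1".into(),
        queue_name: "default".into(),
    };

    // Message::encode writes into any BufMut; for Vec<u8> it cannot
    // run out of capacity, so the error case is unreachable here.
    let mut buf = Vec::new();
    original.encode(&mut buf).expect("encoding into Vec cannot fail");

    // Message::decode reads from any Buf, e.g. a byte slice.
    let decoded = EventProto::decode(buf.as_slice())?;
    assert_eq!(original, decoded);
    Ok(())
}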