use axum::async_trait;
use kafka_protocol::messages::api_versions_response::ApiVersion;
-use kafka_protocol::messages::{ApiKey, ApiVersionsResponse};
+use kafka_protocol::messages::{ApiKey, ApiVersionsResponse, ProduceRequest, ProduceResponse};
+use kafka_protocol::messages::produce_response::{PartitionProduceResponse, TopicProduceResponse};
+use kafka_protocol::records::{RecordBatchDecoder, RecordBatchEncoder};
use metadata_struct::connection::NetworkConnection;
use network_server::{command::Command, common::packet::ResponsePackage};
use protocol::kafka::packet::KafkaPacket;
use protocol::robust::RobustMQPacket;
use std::net::SocketAddr;
use std::sync::Arc;
+use tracing::log::{info, warn};
+use storage_adapter::storage::ArcStorageAdapter;
+use crate::{
+    common::error::Result,
+    storage::message::{Message, TopicPartition},
+};
+use crate::manager::offset::OffsetManager;
+use crate::storage::log_reader::Reader;
+use crate::storage::log_writer::Writer;

-#[derive(Clone, Default)]
-pub struct KafkaCommand {}
+pub struct KafkaCommand {
+    pub writer: Writer,
+    pub reader: Reader,
+    pub offset_manager: OffsetManager,
+}

impl KafkaCommand {
-    fn handle_api_versions(tcp_connection: &NetworkConnection) -> ResponsePackage {
+    pub fn new(storage_adapter: ArcStorageAdapter) -> Self {
+        Self {
+            writer: Writer::new(storage_adapter.clone()),
+            reader: Reader::new(storage_adapter.clone()),
+            offset_manager: OffsetManager::new(),
+        }
+    }
+
+    fn handle_api_versions() -> Result<KafkaPacket> {
        let api_versions: Vec<ApiVersion> = ApiKey::iter()
            .map(|k| {
                let range = ApiKey::valid_versions(&k);
@@ -40,10 +62,79 @@ impl KafkaCommand {
            .with_error_code(0)
            .with_api_keys(api_versions)
            .with_throttle_time_ms(0);
-        ResponsePackage::build(
-            tcp_connection.connection_id,
-            RobustMQPacket::KAFKA(KafkaPacket::ApiVersionResponse(res)),
-        )
+        Ok(KafkaPacket::ApiVersionResponse(res))
+    }
+
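+    // Handle a Produce request: decode each partition's record batch, assign
+    // offsets starting from the offset manager's next offset, stage the messages
+    // for the log writer, and build the corresponding ProduceResponse.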
+    async fn handle_produce(
+        &self,
+        request: ProduceRequest,
+    ) -> Result<KafkaPacket> {
+        let mut topic_responses = vec![];
+        let mut message_batch = vec![];
+
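+        // Walk every topic and partition in the request, turning the raw record
+        // batches into storage-layer messages.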
+        for topic_data in &request.topic_data {
+            let topic_name = topic_data.name.as_str();
+            let mut partition_responses = vec![];
+            for partition_data in &topic_data.partition_data {
+                let partition_index = partition_data.index;
+                let mut base_offset = None;
+                let mut partition_error_code = 0i16;
+                if let Some(records_bytes) = &partition_data.records {
+                    let mut records_buf = records_bytes.clone();
+                    let result = RecordBatchDecoder::decode(&mut records_buf);
+                    match result {
+                        Ok(record_set) => {
+                            let mut log_start_offset = 0i64;
+                            if record_set.records.len() > 0 && base_offset.is_none() {
+                                log_start_offset = self.offset_manager.next_offset(topic_name, partition_index)
+                                    .await
+                                    .unwrap_or(0);
+                                base_offset = Some(log_start_offset);
+                            }
+                            for record in &record_set.records {
+                                log_start_offset += 1;
+                                message_batch.push(Message {
+                                    topic_partition: TopicPartition {
+                                        topic: topic_name.to_string(),
+                                        partition: partition_index,
+                                    },
+                                    offset: log_start_offset,
+                                    record: Message::encode(record.clone()),
+                                });
+                            }
+                        }
+                        Err(e) => {
+                            warn!("Failed to decode record batch: {:?}", e);
+                            partition_error_code = 1;
+                        }
+                    }
+                }
+
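+                // Per-partition acknowledgement; base_offset stays 0 when no records were decoded.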
+                let partition_resp = PartitionProduceResponse::default()
+                    .with_index(partition_index)
+                    .with_error_code(partition_error_code)
+                    .with_base_offset(base_offset.unwrap_or_default())
+                    .with_log_append_time_ms(-1) // message.timestamp.type
+                    .with_log_start_offset(0);
+
+                partition_responses.push(partition_resp);
+            }
+
+            let topic_resp = TopicProduceResponse::default()
+                .with_name(topic_data.name.clone())
+                .with_partition_responses(partition_responses);
+
+            topic_responses.push(topic_resp);
+        }
+
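+        // Persist the staged messages through the log writer before acknowledging the produce.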
+        for msg in message_batch.into_iter() {
+            self.writer.write(&msg).await?;
+        }
+
+        let response = ProduceResponse::default()
+            .with_responses(topic_responses)
+            .with_throttle_time_ms(0);
+        Ok(KafkaPacket::ProduceResponse(response))
    }
}
@@ -55,17 +146,46 @@ impl Command for KafkaCommand {
        _addr: &SocketAddr,
        robust_packet: &RobustMQPacket,
    ) -> Option<ResponsePackage> {
-        let packet = robust_packet.get_kafka_packet().unwrap();
-        match packet.clone() {
-            KafkaPacket::ApiVersionReq(_) => Some(Self::handle_api_versions(tcp_connection)),
-            KafkaPacket::ProduceReq(_) => None,
-            KafkaPacket::FetchReq(_) => None,
-            _ => None,
-        }
+        let packet = match robust_packet.get_kafka_packet() {
+            Some(p) => p,
+            None => {
+                warn!("No Kafka packet found in RobustMQPacket");
+                return None;
+            }
+        };
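+        // Dispatch on the request type; unsupported packets are logged and dropped without a response.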
+        let kafka_response = match packet {
+            KafkaPacket::ApiVersionReq(_) => match Self::handle_api_versions() {
+                Ok(resp) => resp,
+                Err(e) => {
+                    warn!("Failed to build ApiVersionsResponse: {:?}", e);
+                    return None;
+                }
+            },
+            KafkaPacket::ProduceReq(req) => match self.handle_produce(req).await {
+                Ok(resp) => resp,
+                Err(e) => {
+                    warn!("Produce handler failed: {:?}", e);
+                    return None;
+                }
+            },
+            KafkaPacket::FetchReq(_) => {
+                warn!("Received Fetch request but Fetch handling is not implemented");
+                return None;
+            }
+            other => {
+                warn!("Unsupported or unhandled Kafka packet: {:?}", other);
+                return None;
+            }
+        };
+
+        Some(ResponsePackage::build(
+            tcp_connection.connection_id,
+            RobustMQPacket::KAFKA(kafka_response),
+        ))
    }
}
-pub fn create_command() -> Arc<Box<dyn Command + Send + Sync>> {
-    let storage: Box<dyn Command + Send + Sync> = Box::new(KafkaCommand::default());
+pub fn create_command(message_storage_adapter: ArcStorageAdapter) -> Arc<Box<dyn Command + Send + Sync>> {
+    let storage: Box<dyn Command + Send + Sync> = Box::new(KafkaCommand::new(message_storage_adapter));
    Arc::new(storage)
}