@@ -148,15 +148,16 @@ def validate_database_settings(self):
         )
 
     def validate_mysql_structure(self, mysql_structure: TableStructure):
-        primary_field: TableField = mysql_structure.fields[mysql_structure.primary_key_idx]
-        if 'not null' not in primary_field.parameters.lower():
-            logger.warning('primary key validation failed')
-            logger.warning(
-                f'\n\n\n!!! WARNING - PRIMARY KEY NULLABLE (field "{primary_field.name}", table "{mysql_structure.table_name}") !!!\n\n'
-                'There could be errors replicating nullable primary key\n'
-                'Please ensure all tables has NOT NULL parameter for primary key\n'
-                'Or mark tables as skipped, see "exclude_tables" option\n\n\n'
-            )
+        for key_idx in mysql_structure.primary_key_ids:
+            primary_field: TableField = mysql_structure.fields[key_idx]
+            if 'not null' not in primary_field.parameters.lower():
+                logger.warning('primary key validation failed')
+                logger.warning(
+                    f'\n\n\n!!! WARNING - PRIMARY KEY NULLABLE (field "{primary_field.name}", table "{mysql_structure.table_name}") !!!\n\n'
+                    'There could be errors replicating nullable primary key\n'
+                    'Please ensure all tables has NOT NULL parameter for primary key\n'
+                    'Or mark tables as skipped, see "exclude_tables" option\n\n\n'
+                )
 
     def run(self):
         try:
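
The validation above now walks every column of the primary key instead of a single primary_key_idx. A minimal sketch of the shape it assumes, using simplified stand-ins for TableStructure and TableField (not the project's real definitions), with a composite key where one column is nullable:

from dataclasses import dataclass, field

@dataclass
class TableField:
    name: str
    parameters: str = ''

@dataclass
class TableStructure:
    table_name: str = ''
    fields: list = field(default_factory=list)
    primary_key_ids: list = field(default_factory=list)

# A table with composite primary key (user_id, account_id); account_id lacks NOT NULL.
structure = TableStructure(
    table_name='payments',
    fields=[
        TableField('user_id', 'NOT NULL'),
        TableField('account_id', ''),
        TableField('amount', 'NOT NULL'),
    ],
    primary_key_ids=[0, 1],
)

for key_idx in structure.primary_key_ids:
    primary_field = structure.fields[key_idx]
    if 'not null' not in primary_field.parameters.lower():
        print(f'nullable primary key column: {primary_field.name}')  # fires for account_id only

Only the misconfigured column triggers the warning, so a composite key with a single nullable column is still reported.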
@@ -276,29 +277,33 @@ def perform_initial_replication_table(self, table_name):
         logger.debug(f'mysql table structure: {mysql_table_structure}')
         logger.debug(f'clickhouse table structure: {clickhouse_table_structure}')
 
-        field_names = [field.name for field in clickhouse_table_structure.fields]
         field_types = [field.field_type for field in clickhouse_table_structure.fields]
 
-        primary_key = clickhouse_table_structure.primary_key
-        primary_key_index = field_names.index(primary_key)
-        primary_key_type = field_types[primary_key_index]
+        primary_keys = clickhouse_table_structure.primary_keys
+        primary_key_ids = clickhouse_table_structure.primary_key_ids
+        primary_key_types = [field_types[key_idx] for key_idx in primary_key_ids]
 
-        logger.debug(f'primary key name: {primary_key}, type: {primary_key_type}')
+        # logger.debug(f'primary key name: {primary_key}, type: {primary_key_type}')
 
         stats_number_of_records = 0
         last_stats_dump_time = time.time()
 
         while True:
 
-            query_start_value = max_primary_key
-            if 'int' not in primary_key_type.lower() and query_start_value is not None:
-                query_start_value = f"'{query_start_value}'"
+            query_start_values = max_primary_key
+            if query_start_values is not None:
+                for i in range(len(query_start_values)):
+                    key_type = primary_key_types[i]
+                    value = query_start_values[i]
+                    if 'int' not in key_type.lower():
+                        value = f"'{value}'"
+                    query_start_values[i] = value
 
             records = self.mysql_api.get_records(
                 table_name=table_name,
-                order_by=primary_key,
+                order_by=primary_keys,
                 limit=DbReplicator.INITIAL_REPLICATION_BATCH_SIZE,
-                start_value=query_start_value,
+                start_value=query_start_values,
             )
             logger.debug(f'extracted {len(records)} records from mysql')
 
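
With a composite key, max_primary_key becomes a list and order_by/start_value now carry one entry per key column; non-integer values are quoted before being passed on. How mysql_api.get_records turns those arguments into SQL is not shown in this diff; the following is a purely hypothetical sketch of a keyset-pagination query builder matching these arguments (function name and query shape are assumptions, not the project's implementation):

# Hypothetical sketch of assembling a keyset-pagination batch query for a
# composite key; values in start_value are assumed to be pre-quoted by the caller.
def build_batch_query(table_name, order_by, limit, start_value=None):
    where = ''
    if start_value is not None:
        cols = ', '.join(order_by)
        vals = ', '.join(str(v) for v in start_value)
        where = f'WHERE ({cols}) > ({vals}) '
    return (
        f'SELECT * FROM {table_name} '
        f'{where}'
        f'ORDER BY {", ".join(order_by)} '
        f'LIMIT {limit}'
    )

print(build_batch_query('payments', ['user_id', 'region'], 50000, [1005, "'eu-west'"]))
# SELECT * FROM payments WHERE (user_id, region) > (1005, 'eu-west') ORDER BY user_id, region LIMIT 50000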
@@ -311,7 +316,7 @@ def perform_initial_replication_table(self, table_name):
                 break
             self.clickhouse_api.insert(table_name, records, table_structure=clickhouse_table_structure)
             for record in records:
-                record_primary_key = record[primary_key_index]
+                record_primary_key = [record[key_idx] for key_idx in primary_key_ids]
                 if max_primary_key is None:
                     max_primary_key = record_primary_key
                 else:
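
record_primary_key is now a list of key-column values. The branch that updates max_primary_key after the first batch falls outside this hunk, but if it keeps a running maximum (as the variable name suggests), Python's lexicographic list comparison is what makes that correct for composite keys:

# Python compares lists element by element, so the running maximum over composite
# keys advances by the first differing column.
a = [1005, 'a']
b = [1005, 'b']
c = [1006, 'a']
assert max(a, b) == b   # same first column, second column decides
assert max(b, c) == c   # first column decides regardless of the second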
@@ -404,6 +409,16 @@ def save_state_if_required(self, force=False):
         self.state.tables_last_record_version = self.clickhouse_api.tables_last_record_version
         self.state.save()
 
+    def _get_record_id(self, ch_table_structure, record: list):
+        result = []
+        for idx in ch_table_structure.primary_key_ids:
+            field_type = ch_table_structure.fields[idx].field_type
+            if field_type == 'String':
+                result.append(f"'{record[idx]}'")
+            else:
+                result.append(record[idx])
+        return ','.join(map(str, result))
+
     def handle_insert_event(self, event: LogEvent):
         if self.config.debug_log_level:
             logger.debug(
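
The new _get_record_id helper flattens the primary-key columns of a converted record into a single string, quoting String columns, so a composite key can serve as a dictionary or set key in the insert/delete buffers. A self-contained re-run of its logic with toy inputs (SimpleNamespace objects stand in for the real TableStructure/TableField):

from types import SimpleNamespace

# Stand-ins for the ClickHouse-side structure: key columns are (id, code),
# where code is a String and therefore gets quoted.
fields = [
    SimpleNamespace(field_type='Int64'),
    SimpleNamespace(field_type='String'),
    SimpleNamespace(field_type='Float64'),
]
ch_table_structure = SimpleNamespace(fields=fields, primary_key_ids=[0, 1])

record = [42, 'abc', 3.14]

result = []
for idx in ch_table_structure.primary_key_ids:
    field_type = ch_table_structure.fields[idx].field_type
    if field_type == 'String':
        result.append(f"'{record[idx]}'")
    else:
        result.append(record[idx])

print(','.join(map(str, result)))   # 42,'abc'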
@@ -418,12 +433,10 @@ def handle_insert_event(self, event: LogEvent):
         clickhouse_table_structure = self.state.tables_structure[event.table_name][1]
         records = self.converter.convert_records(event.records, mysql_table_structure, clickhouse_table_structure)
 
-        primary_key_ids = mysql_table_structure.primary_key_idx
-
         current_table_records_to_insert = self.records_to_insert[event.table_name]
         current_table_records_to_delete = self.records_to_delete[event.table_name]
         for record in records:
-            record_id = record[primary_key_ids]
+            record_id = self._get_record_id(clickhouse_table_structure, record)
             current_table_records_to_insert[record_id] = record
             current_table_records_to_delete.discard(record_id)
 
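
Because record_id is now a plain string, a composite key still works as a buffer key: the lines above overwrite any record already buffered for the same key and drop a pending delete for it. A toy illustration of that interplay (buffer names mirror the diff; the values are made up):

# Per-table buffers keyed by the composite record id: a later insert for the same
# key replaces the buffered record and cancels a pending delete.
records_to_insert = {}
records_to_delete = {"42,'abc'"}          # this row was previously marked for deletion

record_id = "42,'abc'"
records_to_insert[record_id] = [42, 'abc', 3.14]
records_to_delete.discard(record_id)

print(records_to_insert)   # {"42,'abc'": [42, 'abc', 3.14]}
print(records_to_delete)   # set()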
@@ -437,16 +450,9 @@ def handle_erase_event(self, event: LogEvent):
         self.stats.erase_events_count += 1
         self.stats.erase_records_count += len(event.records)
 
-        table_structure: TableStructure = self.state.tables_structure[event.table_name][0]
         table_structure_ch: TableStructure = self.state.tables_structure[event.table_name][1]
 
-        primary_key_name_idx = table_structure.primary_key_idx
-        field_type_ch = table_structure_ch.fields[primary_key_name_idx].field_type
-
-        if field_type_ch == 'String':
-            keys_to_remove = [f"'{record[primary_key_name_idx]}'" for record in event.records]
-        else:
-            keys_to_remove = [record[primary_key_name_idx] for record in event.records]
+        keys_to_remove = [self._get_record_id(table_structure_ch, record) for record in event.records]
 
         current_table_records_to_insert = self.records_to_insert[event.table_name]
         current_table_records_to_delete = self.records_to_delete[event.table_name]
@@ -546,12 +552,12 @@ def upload_records(self):
             if not keys_to_remove:
                 continue
             table_structure: TableStructure = self.state.tables_structure[table_name][0]
-            primary_key_name = table_structure.primary_key
+            primary_key_names = table_structure.primary_keys
             if self.config.debug_log_level:
-                logger.debug(f'erasing from {table_name}, primary key: {primary_key_name}, values: {keys_to_remove}')
+                logger.debug(f'erasing from {table_name}, primary key: {primary_key_names}, values: {keys_to_remove}')
             self.clickhouse_api.erase(
                 table_name=table_name,
-                field_name=primary_key_name,
+                field_name=primary_key_names,
                 field_values=keys_to_remove,
             )
 
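
field_name passed to clickhouse_api.erase is now a list of key columns, and field_values carries the comma-joined ids built by _get_record_id. The erase implementation itself is outside this diff; one hypothetical way a multi-column delete could be phrased on the ClickHouse side (function name and query shape are assumptions, not the project's code) is a tuple-IN mutation:

# Hypothetical sketch of a multi-column delete predicate; field_values entries
# already carry quoted values joined by commas, e.g. "42,'abc'".
def build_erase_query(table_name, field_names, field_values):
    cols = ', '.join(field_names)
    vals = ', '.join(f'({v})' for v in field_values)
    return f'ALTER TABLE {table_name} DELETE WHERE ({cols}) IN ({vals})'

print(build_erase_query('payments', ['id', 'code'], ["42,'abc'", "43,'def'"]))
# ALTER TABLE payments DELETE WHERE (id, code) IN ((42,'abc'), (43,'def'))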