|
| 1 | +import pickle |
| 2 | +import os |
| 3 | +import time |
| 4 | +from logging import getLogger |
| 5 | + |
| 6 | +from .config import Settings |
| 7 | +from .mysql_api import MySQLApi |
| 8 | +from .clickhouse_api import ClickhouseApi |
| 9 | +from .utils import GracefulKiller |
| 10 | + |
| 11 | + |
| 12 | +logger = getLogger(__name__) |
| 13 | + |
| 14 | + |
class State:
    """Persistent optimizer state: when each database was last optimized.

    The state is pickled to ``file_name``. Saves go through a temporary
    file followed by ``os.rename`` so a crash mid-write cannot leave a
    corrupt state file behind.
    """

    def __init__(self, file_name):
        # Path of the pickle file backing this state.
        self.file_name = file_name
        # db_name -> unix timestamp of the last completed optimization pass.
        self.last_process_time = {}
        self.load()

    def load(self):
        """Restore state from disk; a missing file leaves the defaults."""
        file_name = self.file_name
        if not os.path.exists(file_name):
            return
        # Use a context manager so the file handle is closed deterministically
        # (the original left it to the GC).
        with open(file_name, 'rb') as f:
            data = pickle.loads(f.read())
        # NOTE(review): pickle is acceptable only because this file is written
        # by this same process — never point this at an untrusted file.
        self.last_process_time = data['last_process_time']

    def save(self):
        """Persist state atomically (write temp file, then rename over target)."""
        file_name = self.file_name
        data = pickle.dumps({
            'last_process_time': self.last_process_time,
        })
        with open(file_name + '.tmp', 'wb') as f:
            f.write(data)
        # Atomic on POSIX: readers see either the old or the new state.
        os.rename(file_name + '.tmp', file_name)
| 38 | + |
| 39 | + |
class DbOptimizer:
    """Periodically runs ``OPTIMIZE TABLE ... FINAL`` on replicated databases.

    Walks every MySQL database matched by the config, finds its ClickHouse
    counterpart, and optimizes each replicated table at most once per
    ``config.optimize_interval`` seconds, checkpointing progress to disk.
    """

    def __init__(self, config: Settings):
        # Persisted map of db -> last optimization timestamp.
        self.state = State(os.path.join(
            config.binlog_replicator.data_dir,
            'db_optimizer.bin',
        ))
        self.config = config
        self.mysql_api = MySQLApi(
            database=None,
            mysql_settings=config.mysql,
        )
        self.clickhouse_api = ClickhouseApi(
            database=None,
            clickhouse_settings=config.clickhouse,
        )

    def select_db_to_optimize(self):
        """Return the next database due for optimization, or ``None``.

        A database is eligible when it matches the config filter, exists in
        ClickHouse, and was last processed more than ``optimize_interval``
        seconds ago.
        """
        databases = self.mysql_api.get_databases()
        databases = [db for db in databases if self.config.is_database_matches(db)]
        ch_databases = set(self.clickhouse_api.get_databases())

        for db in databases:
            if db not in ch_databases:
                continue
            last_process_time = self.state.last_process_time.get(db, 0.0)
            if time.time() - last_process_time < self.config.optimize_interval:
                continue
            return db
        return None

    def optimize_table(self, db_name, table_name):
        """Run a blocking ``OPTIMIZE ... FINAL`` on one ClickHouse table."""
        logger.info(f'Optimizing table {db_name}.{table_name}')
        # Backtick-quote identifiers so names with special characters
        # (e.g. dashes) cannot break or alter the statement.
        self.clickhouse_api.execute_command(
            f'OPTIMIZE TABLE `{db_name}`.`{table_name}` FINAL SETTINGS mutations_sync = 2'
        )
        logger.info('Optimize finished')
        self.state.last_process_time[db_name] = time.time()

    def optimize_database(self, db_name):
        """Optimize every replicated table of ``db_name`` and persist progress."""
        self.mysql_api.set_database(db_name)
        tables = self.mysql_api.get_tables()
        tables = [table for table in tables if self.config.is_table_matches(table)]

        self.clickhouse_api.execute_command(f'USE `{db_name}`')
        ch_tables = set(self.clickhouse_api.get_tables())

        for table in tables:
            if table not in ch_tables:
                continue
            self.optimize_table(db_name, table)
            # Checkpoint after each table so a restart resumes near where
            # the previous run stopped.
            self.state.save()

        # BUGFIX: stamp the database as processed even when no matching table
        # was found/optimized. Previously the timestamp was only updated inside
        # optimize_table(), so a matched database with zero replicated tables
        # was returned by select_db_to_optimize() forever and run() spun in a
        # busy loop without ever sleeping.
        self.state.last_process_time[db_name] = time.time()
        self.state.save()

    def run(self):
        """Main loop: pick a due database, optimize it, sleep when idle.

        Exits when GracefulKiller observes a shutdown signal; any unexpected
        exception is logged and terminates the loop.
        """
        logger.info('running optimizer')
        killer = GracefulKiller()
        try:
            while not killer.kill_now:
                db_to_optimize = self.select_db_to_optimize()
                if db_to_optimize is None:
                    # Nothing due yet: sleep, but cap at 120s so shutdown
                    # signals are noticed promptly.
                    time.sleep(min(120, self.config.optimize_interval))
                    continue
                self.optimize_database(db_name=db_to_optimize)
        except Exception as e:
            logger.error(f'error {e}', exc_info=True)
        logger.info('optimizer stopped')
0 commit comments