Skip to content

Commit 425b1b3

Browse files
Merge pull request #50 from Aiven-Open/daniel.blasina/add_error_messages_to_exceptions
migration: add error messages to exceptions explaining what happened
2 parents 57aa3ed + 290b1a5 commit 425b1b3

File tree

1 file changed: +9 −8 lines changed

aiven_mysql_migrate/migration.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,7 @@ def _check_database_size(self, max_size: float):
198198
)
199199
source_size = cur.fetchone()["size"] or 0
200200
if source_size > max_size:
201-
raise DatabaseTooLargeException()
201+
raise DatabaseTooLargeException(f"Database is larger than the maximum size of {max_size} bytes")
202202

203203
def _check_bin_log_format(self):
204204
with self.source.cur() as cur:
@@ -215,7 +215,7 @@ def _check_source_target_uuids_aligned(self):
215215
(source_server_uuid,)
216216
)
217217
if cur.fetchone()["included"] != 1:
218-
raise IncompatibleGtidsException()
218+
raise IncompatibleGtidsException("gtid_executed on target server doesn't match gtid from source")
219219

220220
def run_checks(
221221
self,
@@ -361,22 +361,23 @@ def _ensure_target_replica_running(self, check_interval: float = 2.0, retries: i
361361
cur.execute("SHOW SLAVE STATUS")
362362
rows = cur.fetchall()
363363
if not rows:
364-
raise ReplicaSetupException()
364+
raise ReplicaSetupException("SHOW SLAVE STATUS didn't return any rows")
365365

366366
try:
367367
slave_status = next(
368368
row for row in rows
369369
if row["Master_Host"] == self.source.hostname and row["Master_Port"] == self.source.port
370370
)
371371
except StopIteration as e:
372-
raise ReplicaSetupException() from e
372+
raise ReplicaSetupException("Replication didn't start, Master info not available") from e
373373

374374
if slave_status["Slave_IO_Running"] == "Yes" and slave_status["Slave_SQL_Running"] == "Yes":
375375
return
376376

377377
time.sleep(check_interval)
378378

379-
raise ReplicaSetupException()
379+
raise ReplicaSetupException(f"Replication didn't start after {retries} "
380+
f"retries with interval {check_interval} seconds")
380381

381382
def _wait_for_replication(self, *, seconds_behind_master: int = 0, check_interval: float = 2.0):
382383
LOGGER.info("Wait for replication to catch up")
@@ -386,19 +387,19 @@ def _wait_for_replication(self, *, seconds_behind_master: int = 0, check_interva
386387
cur.execute("SHOW SLAVE STATUS")
387388
rows = cur.fetchall()
388389
if not rows:
389-
raise ReplicaSetupException()
390+
raise ReplicaSetupException("SHOW SLAVE STATUS didn't return any rows")
390391

391392
try:
392393
slave_status = next(
393394
row for row in rows
394395
if row["Master_Host"] == self.source.hostname and row["Master_Port"] == self.source.port
395396
)
396397
except StopIteration as e:
397-
raise ReplicaSetupException() from e
398+
raise ReplicaSetupException("Replication didn't catch up, Master info not available") from e
398399

399400
lag = slave_status["Seconds_Behind_Master"]
400401
if lag is None:
401-
raise ReplicaSetupException()
402+
raise ReplicaSetupException("Replication didn't catch up, Seconds_Behind_Master is null")
402403

403404
LOGGER.info("Current replication lag: %s seconds", lag)
404405
if lag <= seconds_behind_master:

0 commit comments

Comments (0)