@@ -1678,9 +1678,9 @@ def create_parallel_schema(
 
     # Get all tables from source schema
     tables_query = """
-        SELECT TABLE_NAME
-        FROM information_schema.TABLES
-        WHERE TABLE_SCHEMA = %s
+        SELECT TABLE_NAME
+        FROM information_schema.TABLES
+        WHERE TABLE_SCHEMA = %s
         ORDER BY TABLE_NAME
     """
     tables = [row[0] for row in connection.query(tables_query, args=(source,)).fetchall()]
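
The hunk above enumerates the source schema's tables through MySQL's information_schema. As a standalone illustration, here is a minimal sketch using pymysql directly; the host and credentials are placeholders, and the real code reuses the pipeline's existing connection object instead:

    import pymysql

    def list_tables(source_schema):
        # Placeholder credentials; substitute your own server details.
        conn = pymysql.connect(host="localhost", user="root", password="")
        with conn.cursor() as cur:
            cur.execute(
                "SELECT TABLE_NAME FROM information_schema.TABLES "
                "WHERE TABLE_SCHEMA = %s ORDER BY TABLE_NAME",
                (source_schema,),
            )
            return [row[0] for row in cur.fetchall()]
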
@@ -1751,7 +1751,7 @@ def copy_table_data(
     --------
     >>> # Copy all data
     >>> result = copy_table_data('my_pipeline', 'my_pipeline_v20', 'Mouse')
-
+
     >>> # Copy sample
     >>> result = copy_table_data(
     ...     'my_pipeline', 'my_pipeline_v20', 'Session',
@@ -1878,12 +1878,10 @@ def compare_query_results(
                 continue
 
             # Handle floating-point comparison
-            if col_type in ('float', 'double', 'decimal'):
+            if col_type in ("float", "double", "decimal"):
                 if abs(float(prod_val) - float(test_val)) > tolerance:
                     result["match"] = False
-                    result["discrepancies"].append(
-                        f"Row {i}, {col_name}: {prod_val} != {test_val} (diff > {tolerance})"
-                    )
+                    result["discrepancies"].append(f"Row {i}, {col_name}: {prod_val} != {test_val} (diff > {tolerance})")
             else:
                 if prod_val != test_val:
                     result["match"] = False
@@ -1896,9 +1894,7 @@ def compare_query_results(
 
     if prod_checksum != test_checksum:
         result["match"] = False
-        result["discrepancies"].append(
-            f"Checksum mismatch: prod={prod_checksum}, test={test_checksum}"
-        )
+        result["discrepancies"].append(f"Checksum mismatch: prod={prod_checksum}, test={test_checksum}")
 
     return result
 
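
For reference, the float branch above flags a discrepancy whenever the absolute difference exceeds tolerance, while all other column types must match exactly. A self-contained equivalent of that comparison rule (a sketch; values_match and the tolerance default are illustrative names, not part of the diff):

    def values_match(prod_val, test_val, col_type, tolerance=1e-9):
        # Float-typed columns compare within an absolute tolerance;
        # everything else requires exact equality.
        if col_type in ("float", "double", "decimal"):
            return abs(float(prod_val) - float(test_val)) <= tolerance
        return prod_val == test_val
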
@@ -2161,15 +2157,11 @@ def migrate_external_pointers_v2(
     import json
     from datetime import datetime, timezone
     from . import conn as get_conn
-    from .settings import get_store_spec
 
     if connection is None:
         connection = get_conn()
 
-    logger.info(
-        f"Migrating external pointers: {schema}.{table}.{attribute} "
-        f"({source_store} → {dest_store})"
-    )
+    logger.info(f"Migrating external pointers: {schema}.{table}.{attribute} " f"({source_store} → {dest_store})")
 
     # Get source store specification (0.14.6)
     # Note: This assumes old external table exists
@@ -2188,9 +2180,6 @@ def migrate_external_pointers_v2(
             f"Cannot migrate external pointers from 0.14.6 format."
         )
 
-    # Get dest store spec for path construction
-    dest_spec = get_store_spec(dest_store)
-
     result = {
         "rows_migrated": 0,
         "files_copied": 0,
@@ -2237,9 +2226,7 @@ def migrate_external_pointers_v2(
         file_info = connection.query(lookup_query, args=(uuid_bytes,)).fetchone()
 
         if file_info is None:
-            result["errors"].append(
-                f"External file not found for UUID: {uuid_bytes.hex()}"
-            )
+            result["errors"].append(f"External file not found for UUID: {uuid_bytes.hex()}")
             continue
 
         hash_hex, size, timestamp, filepath = file_info
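
The lookup above keys the legacy external table on the raw 16-byte UUID, and the error path reports it as hex. The round trip between uuid.UUID values and that byte form, using only the standard library:

    import uuid

    u = uuid.uuid4()
    uuid_bytes = u.bytes                 # the 16-byte key used in the lookup
    assert uuid_bytes.hex() == u.hex     # the hex string logged on a miss
    assert uuid.UUID(bytes=uuid_bytes) == u
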
@@ -2301,8 +2288,6 @@ def migrate_external_pointers_v2(
     # This requires knowing source and dest store locations
     logger.warning("File copying not yet implemented in migrate_external_pointers_v2")
 
-    logger.info(
-        f"Migrated {result['rows_migrated']} external pointers for {schema}.{table}.{attribute}"
-    )
+    logger.info(f"Migrated {result['rows_migrated']} external pointers for {schema}.{table}.{attribute}")
 
     return result
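
The warning above leaves the actual file copy unimplemented. If both stores were plain filesystem locations, the missing step might look roughly like the sketch below; this is purely hypothetical, and source_root, dest_root, and the relative filepath layout are assumptions rather than the project's store API:

    import shutil
    from pathlib import Path

    def copy_external_file(source_root, dest_root, filepath):
        # Hypothetical: mirror one external file between filesystem stores,
        # creating parent directories as needed.
        src = Path(source_root) / filepath
        dst = Path(dest_root) / filepath
        dst.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src, dst)  # copy2 preserves file metadata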