@@ -579,8 +579,19 @@ def _handle_create_error(
     error_str = str(create_error)
     error_str_lower = error_str.lower()

-    # Handle database connection pool exhaustion errors
+    # Handle constraint violation errors (e.g., XML ID space constraint)
     if (
+        "constraint" in error_str_lower
+        or "check constraint" in error_str_lower
+        or "nospaces" in error_str_lower
+        or "violation" in error_str_lower
+    ):
+        error_message = f"Constraint violation in row {i + 1}: {create_error}"
+        if "Fell back to create" in error_summary:
+            error_summary = "Database constraint violation detected"
+
+    # Handle database connection pool exhaustion errors
+    elif (
         "connection pool is full" in error_str_lower
         or "too many connections" in error_str_lower
         or "poolerror" in error_str_lower
@@ -670,13 +681,17 @@ def _create_batch_individually( # noqa: C901
         )

         source_id = line[uid_index]
+        # Sanitize source_id to ensure it's a valid XML ID
+        from .lib.internal.tools import to_xmlid
+        sanitized_source_id = to_xmlid(source_id)
+
         # 1. SEARCH BEFORE CREATE
         existing_record = model.browse().env.ref(
-            f"__export__.{source_id}", raise_if_not_found=False
+            f"__export__.{sanitized_source_id}", raise_if_not_found=False
         )

         if existing_record:
-            id_map[source_id] = existing_record.id
+            id_map[sanitized_source_id] = existing_record.id
             continue

         # 2. PREPARE FOR CREATE
@@ -713,7 +728,7 @@ def _create_batch_individually( # noqa: C901
             log.debug(f"Converted vals keys: {list(converted_vals.keys())}")

             new_record = model.create(converted_vals, context=context)
-            id_map[source_id] = new_record.id
+            id_map[sanitized_source_id] = new_record.id
         except IndexError as e:
             error_str_lower = str(e).lower()

@@ -916,17 +931,120 @@ def _execute_load_batch( # noqa: C901
         log.debug(f"Attempting `load` for chunk of batch {batch_number}...")

         res = model.load(load_header, load_lines, context=context)
+
         if res.get("messages"):
             error = res["messages"][0].get("message", "Batch load failed.")
-            raise ValueError(error)
-
+            # Don't raise immediately, log and continue to capture in fail file
+            log.error(f"Odoo server error during load: {error}")
+
         created_ids = res.get("ids", [])
-        if len(created_ids) != len(load_lines):
-            raise ValueError("Record count mismatch after load.")
+        log.debug(f"Expected records: {len(sanitized_load_lines)}, Created records: {len(created_ids)}")
+
+        # Always log detailed information about record creation
+        if len(created_ids) != len(sanitized_load_lines):
+            log.warning(
+                f"Record creation mismatch: Expected {len(sanitized_load_lines)} records, "
+                f"but only {len(created_ids)} were created"
+            )
+            if len(created_ids) == 0:
+                log.error(
+                    f"No records were created in this batch of {len(sanitized_load_lines)}. "
+                    f"This may indicate silent failures in the Odoo load operation. "
+                    f"Check Odoo server logs for validation errors."
+                )
+                # Log the actual data being sent for debugging
+                if sanitized_load_lines:
+                    log.debug("First few lines being sent:")
+                    for i, line in enumerate(sanitized_load_lines[:3]):
+                        log.debug(f"  Line {i}: {dict(zip(load_header, line))}")
+            else:
+                log.warning(
+                    f"Partial record creation: {len(created_ids)}/{len(sanitized_load_lines)} "
+                    f"records were created. Some records may have failed validation."
+                )
+        # Check for any Odoo server errors in the response that should halt processing
+        if res.get("messages"):
+            for message in res["messages"]:
+                msg_type = message.get("type", "unknown")
+                msg_text = message.get("message", "")
+                if msg_type == "error":
+                    # Only raise for actual errors, not warnings
+                    log.error(f"Load operation returned fatal error: {msg_text}")
+                    raise ValueError(msg_text)
+                elif msg_type in ["warning", "info"]:
+                    log.warning(f"Load operation returned {msg_type}: {msg_text}")
+                else:
+                    log.info(f"Load operation returned {msg_type}: {msg_text}")

+        # Instead of raising an exception, capture failures for the fail file
+        # But still create what records we can
+        if res.get("messages"):
+            # Extract error information and add to failed_lines to be written to fail file
+            error_msg = res["messages"][0].get("message", "Batch load failed.")
+            log.error(f"Capturing load failure for fail file: {error_msg}")
+            # We'll add the failed lines to aggregated_failed_lines at the end
+
+        # Use sanitized IDs for the id_map to match what was actually sent to Odoo
         id_map = {
-            line[uid_index]: created_ids[i] for i, line in enumerate(current_chunk)
+            to_xmlid(line[uid_index]): created_ids[i] if i < len(created_ids) else None
+            for i, line in enumerate(current_chunk)
         }
+
+        # Remove None entries (failed creations) from id_map
+        id_map = {k: v for k, v in id_map.items() if v is not None}
+
+        # Log id_map information for debugging
+        log.debug(f"Created {len(id_map)} records in batch {batch_number}")
+        if id_map:
+            log.debug(f"Sample id_map entries: {dict(list(id_map.items())[:3])}")
+        else:
+            log.warning(f"No id_map entries created for batch {batch_number}")
+
+        # Capture failed lines for writing to fail file
+        successful_count = len(created_ids)
+        total_count = len(sanitized_load_lines)
+
+        if successful_count < total_count:
+            failed_count = total_count - successful_count
+            log.info(f"Capturing {failed_count} failed records for fail file")
+            # Add error information to the lines that failed
+            for i, line in enumerate(current_chunk):
+                # Check if this line corresponds to a created record
+                if i >= len(created_ids) or created_ids[i] is None:
+                    # This record failed, add it to failed_lines with error info
+                    error_msg = "Record creation failed"
+                    if res.get("messages"):
+                        error_msg = res["messages"][0].get("message", error_msg)
+
+                    failed_line = list(line) + [f"Load failed: {error_msg}"]
+                    aggregated_failed_lines.append(failed_line)
+
         aggregated_id_map.update(id_map)
         lines_to_process = lines_to_process[chunk_size:]

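For reference, a minimal standalone sketch (not part of the commit) of the fallback mapping pattern introduced in `_execute_load_batch`: created IDs are paired positionally with the rows of the chunk, and rows that received no ID are dropped from the map and collected for the fail file. The `to_xmlid` helper below is a hypothetical stand-in for the project's own sanitizer in `.lib.internal.tools`, which may apply different normalization rules.

def to_xmlid(value):
    # Hypothetical stand-in for the real sanitizer in .lib.internal.tools.
    return str(value).strip().lower().replace(" ", "_")


def map_created_ids(chunk, created_ids, uid_index=0):
    """Pair each source row with its created ID; collect rows that got none."""
    id_map = {}
    failed_lines = []
    for i, line in enumerate(chunk):
        new_id = created_ids[i] if i < len(created_ids) else None
        if new_id is None:
            # Mirror the commit's behavior: keep the row for the fail file.
            failed_lines.append(list(line) + ["Load failed: record creation failed"])
        else:
            id_map[to_xmlid(line[uid_index])] = new_id
    return id_map, failed_lines


# Example: three rows sent, but the server reports only two created IDs.
ids, failed = map_created_ids(
    [("Row One", "x"), ("Row Two", "y"), ("Row Three", "z")],
    created_ids=[101, 102],
)
assert ids == {"row_one": 101, "row_two": 102}
assert len(failed) == 1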