@@ -9,6 +9,7 @@
 import csv
 import sys
 import time
+import traceback
 from collections.abc import Generator, Iterable
 from concurrent.futures import ThreadPoolExecutor, as_completed  # noqa
 from typing import Any, Optional, TextIO, Union
@@ -25,7 +26,7 @@
 
 from .lib import conf_lib
 from .lib.internal.rpc_thread import RpcThread
-from .lib.internal.tools import batch
+from .lib.internal.tools import batch, to_xmlid
 from .logging_config import log
 
 try:
@@ -91,6 +92,10 @@ def _read_data_file(
         return [], []
     except Exception as e:
         log.error(f"Failed to read file {file_path}: {e}")
+        log.error(f"Exception type: {type(e).__name__}")
+        log.error(f"Exception args: {e.args}")
+
+        log.error(f"Full traceback: {traceback.format_exc()}")
         if isinstance(e, ValueError):
             raise
         return [], []
@@ -509,8 +514,9 @@ def _create_batch_individually(
         source_id = line[uid_index]
         # Sanitize source_id to ensure it's a valid XML ID
         from .lib.internal.tools import to_xmlid
+
         sanitized_source_id = to_xmlid(source_id)
-
+
         # 1. SEARCH BEFORE CREATE
         existing_record = model.browse().env.ref(
             f"__export__.{sanitized_source_id}", raise_if_not_found=False
@@ -693,16 +699,69 @@ def _execute_load_batch(  # noqa: C901
         log.debug(f"Load header: {load_header}")
         log.debug(f"Load lines count: {len(load_lines)}")
         if load_lines:
-            log.debug(f"First load line (first 10 fields): {load_lines[0][:10] if len(load_lines[0]) > 10 else load_lines[0]}")
+            first_line_preview = (
+                load_lines[0][:10] if len(load_lines[0]) > 10 else load_lines[0]
+            )
+            log.debug(f"First load line (first 10 fields): {first_line_preview}")
             log.debug(f"Full header: {load_header}")
             # Log the full header and first line for debugging
             if len(load_header) > 10:
                 log.debug(f"Full load_header: {load_header}")
             if len(load_lines[0]) > 10:
                 log.debug(f"Full first load_line: {load_lines[0]}")
-
+
+        # Sanitize the id column values to prevent XML ID constraint violations
+        sanitized_load_lines = []
+        for i, line in enumerate(load_lines):
+            sanitized_line = list(line)
+            if uid_index < len(sanitized_line):
+                # Sanitize the source_id (which is in the id column)
+                original_id = sanitized_line[uid_index]
+                sanitized_id = to_xmlid(original_id)
+                sanitized_line[uid_index] = sanitized_id
+                if i < 3:  # Only log first 3 lines for debugging
+                    log.debug(
+                        f"Sanitized ID for line {i}: '{original_id}' -> '{sanitized_id}'"
+                    )
+            else:
+                if i < 3:  # Only log first 3 lines for debugging
+                    log.warning(
+                        f"Line {i} does not have enough columns for uid_index {uid_index}. "
+                        f"Line has {len(line)} columns."
+                    )
+            sanitized_load_lines.append(sanitized_line)
+
+        # Log sample of sanitized data without large base64 content
+        log.debug(f"Load header: {load_header}")
+        log.debug(f"Load lines count: {len(sanitized_load_lines)}")
+        if sanitized_load_lines and len(sanitized_load_lines) > 0:
+            # Show first line but truncate large base64 data
+            preview_line = []
+            for i, field_value in enumerate(
+                sanitized_load_lines[0][:10]
+                if len(sanitized_load_lines[0]) > 10
+                else sanitized_load_lines[0]
+            ):
+                if isinstance(field_value, str) and len(field_value) > 100:
+                    # Truncate large strings (likely base64 data)
+                    preview_line.append(
+                        f"{field_value[:50]}...[{len(field_value) - 100} chars truncated]...{field_value[-50:]}"
+                    )
+                else:
+                    preview_line.append(field_value)
+            log.debug(
+                f"First load line (first 10 fields, truncated if large): {preview_line}"
+            )
+
         res = model.load(load_header, sanitized_load_lines, context=context)
-
+
+        # DEBUG: Log detailed information about the load response
+        log.debug(f"Load response type: {type(res)}")
+        log.debug(
+            f"Load response keys: {list(res.keys()) if hasattr(res, 'keys') else 'Not a dict'}"
+        )
+        log.debug(f"Load response full content: {res}")
+
         # DEBUG: Log what we got back from Odoo
         log.debug(
             f"Load response - messages: {res.get('messages', 'None')}, "
@@ -718,16 +777,26 @@ def _execute_load_batch(  # noqa: C901
                     log.warning(f"Load operation returned {msg_type}: {msg_text}")
                 else:
                     log.info(f"Load operation returned {msg_type}: {msg_text}")
-
-        # Check for any Odoo server errors in the response
+
+        # Check for any Odoo server errors in the response that should halt processing
         if res.get("messages"):
-            error = res["messages"][0].get("message", "Batch load failed.")
-            # Don't raise immediately, log and continue to capture in fail file
-            log.error(f"Odoo server error during load: {error}")
-
+            for message in res["messages"]:
+                msg_type = message.get("type", "unknown")
+                msg_text = message.get("message", "")
+                if msg_type == "error":
+                    # Only raise for actual errors, not warnings
+                    log.error(f"Load operation returned fatal error: {msg_text}")
+                    raise ValueError(msg_text)
+                elif msg_type in ["warning", "info"]:
+                    log.warning(f"Load operation returned {msg_type}: {msg_text}")
+                else:
+                    log.info(f"Load operation returned {msg_type}: {msg_text}")
+
         created_ids = res.get("ids", [])
-        log.debug(f"Expected records: {len(sanitized_load_lines)}, Created records: {len(created_ids)}")
-
+        log.debug(
+            f"Expected records: {len(sanitized_load_lines)}, Created records: {len(created_ids)}"
+        )
+
         # Always log detailed information about record creation
         if len(created_ids) != len(sanitized_load_lines):
             log.warning(
@@ -742,7 +811,7 @@ def _execute_load_batch(  # noqa: C901
             )
             # Log the actual data being sent for debugging
             if sanitized_load_lines:
-                log.debug(f"First few lines being sent:")
+                log.debug("First few lines being sent:")
                 for i, line in enumerate(sanitized_load_lines[:3]):
                     log.debug(f"  Line {i}: {dict(zip(load_header, line))}")
             else:
@@ -765,8 +834,10 @@ def _execute_load_batch(  # noqa: C901
                     log.info(f"Load operation returned {msg_type}: {msg_text}")
 
         created_ids = res.get("ids", [])
-        log.debug(f"Expected records: {len(sanitized_load_lines)}, Created records: {len(created_ids)}")
-
+        log.debug(
+            f"Expected records: {len(sanitized_load_lines)}, Created records: {len(created_ids)}"
+        )
+
         # Always log detailed information about record creation
         if len(created_ids) != len(sanitized_load_lines):
             log.warning(
@@ -781,43 +852,49 @@ def _execute_load_batch(  # noqa: C901
             )
             # Log the actual data being sent for debugging
             if sanitized_load_lines:
-                log.debug(f"First few lines being sent:")
+                log.debug("First few lines being sent:")
                 for i, line in enumerate(sanitized_load_lines[:3]):
                     log.debug(f"  Line {i}: {dict(zip(load_header, line))}")
             else:
                 log.warning(
                     f"Partial record creation: {len(created_ids)}/{len(sanitized_load_lines)} "
                     f"records were created. Some records may have failed validation."
                 )
-
+
         # Instead of raising an exception, capture failures for the fail file
         # But still create what records we can
         if res.get("messages"):
            # Extract error information and add to failed_lines to be written to fail file
            error_msg = res["messages"][0].get("message", "Batch load failed.")
            log.error(f"Capturing load failure for fail file: {error_msg}")
-            # We'll add the failed lines to aggregated_failed_lines at the end
-
-        # Use sanitized IDs for the id_map to match what was actually sent to Odoo
-        id_map = {
-            to_xmlid(line[uid_index]): created_ids[i] if i < len(created_ids) else None
-            for i, line in enumerate(current_chunk)
-        }
-
-        # Remove None entries (failed creations) from id_map
-        id_map = {k: v for k, v in id_map.items() if v is not None}
-
+            # We'll add the failed lines to aggregated_failed_lines
+            # at the end
+
+        id_map = {}
+        for i, line in enumerate(current_chunk):
+            # Ensure there's a corresponding created ID and that
+            # it's a valid integer.
+            # The 'incompatible type' error happens when the
+            # value could be None.
+            if i < len(created_ids) and created_ids[i] is not None:
+                sanitized_id = to_xmlid(line[uid_index])
+                db_id = created_ids[i]
+                id_map[sanitized_id] = db_id
+
+        # The update call remains the same and will now be type-safe.
+        aggregated_id_map.update(id_map)
+
         # Log id_map information for debugging
         log.debug(f"Created {len(id_map)} records in batch {batch_number}")
         if id_map:
             log.debug(f"Sample id_map entries: {dict(list(id_map.items())[:3])}")
         else:
             log.warning(f"No id_map entries created for batch {batch_number}")
-
+
         # Capture failed lines for writing to fail file
         successful_count = len(created_ids)
         total_count = len(sanitized_load_lines)
-
+
         if successful_count < total_count:
             failed_count = total_count - successful_count
             log.info(f"Capturing {failed_count} failed records for fail file")
@@ -829,10 +906,10 @@ def _execute_load_batch(  # noqa: C901
                 error_msg = "Record creation failed"
                 if res.get("messages"):
                     error_msg = res["messages"][0].get("message", error_msg)
-
+
                 failed_line = list(line) + [f"Load failed: {error_msg}"]
                 aggregated_failed_lines.append(failed_line)
-
+
         aggregated_id_map.update(id_map)
         lines_to_process = lines_to_process[chunk_size:]
 
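
Note: the None-safe id_map construction introduced above can be exercised in isolation. Below is a minimal sketch; the `to_xmlid` stub is a hypothetical stand-in for the real helper in `.lib.internal.tools` (whose exact behavior is not shown in this diff), and the simulated `load()` response marks failed rows with None, as the new comments describe.

import re


def to_xmlid(value: str) -> str:
    # Hypothetical stand-in for lib.internal.tools.to_xmlid; the real
    # helper's behavior is not shown in this diff.
    return re.sub(r"[^A-Za-z0-9_]", "_", value)


# Simulated inputs: three source rows, where the second row failed to load.
current_chunk = [["rec 1", "Alpha"], ["rec.2", "Beta"], ["rec-3", "Gamma"]]
created_ids = [101, None, 103]  # None marks a row that was not created
uid_index = 0

# Skip missing database ids up front so the map only ever holds integers,
# matching the type-safe construction in the diff.
id_map = {}
for i, line in enumerate(current_chunk):
    if i < len(created_ids) and created_ids[i] is not None:
        id_map[to_xmlid(line[uid_index])] = created_ids[i]

assert id_map == {"rec_1": 101, "rec_3": 103}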