diff --git a/spp_registry_base/__manifest__.py b/spp_registry_base/__manifest__.py
index 57c84fdd7..9fd0d2b1c 100644
--- a/spp_registry_base/__manifest__.py
+++ b/spp_registry_base/__manifest__.py
@@ -27,7 +27,8 @@
     ],
     "assets": {
         "web.assets_backend": [
-            "spp_registry/static/src/import_records/import_records.js",
+            "spp_registry_base/static/src/import_records/import_records.js",
+            "spp_registry_base/static/src/js/base_import_patch.js",
         ],
     },
     "demo": [],
diff --git a/spp_registry_base/models/__init__.py b/spp_registry_base/models/__init__.py
index 91fed54d4..ed4d85c3a 100644
--- a/spp_registry_base/models/__init__.py
+++ b/spp_registry_base/models/__init__.py
@@ -1 +1,2 @@
 from . import res_partner
+# from . import base_import_patch
diff --git a/spp_registry_base/models/base_import_patch.py b/spp_registry_base/models/base_import_patch.py
new file mode 100644
--- /dev/null
+++ b/spp_registry_base/models/base_import_patch.py
@@ -0,0 +1,55 @@
+import logging
+
+from odoo import models
+
+LIMIT_FIELD = "limit"
+_logger = logging.getLogger(__name__)
+
+
+class BaseImportPatch(models.TransientModel):
+    _inherit = "base_import.import"
+
+    def execute_import(self, fields, columns, options, dryrun=False):
+        """Split large imports into server-driven batches for 'Test' and 'Import'.
+
+        Runs the parent import for the current batch only, then reads the
+        total file length to decide whether a follow-up batch is needed.
+        When more rows remain, a ``next_action`` carrying the next ``skip``
+        offset is returned so the client can trigger the next step; this
+        prevents CPU timeouts on large files.
+        """
+        self.ensure_one()
+
+        # Current batch window.  2000 is Odoo's default import batch size.
+        limit = options.get(LIMIT_FIELD, 2000)
+        skip = options.get("skip", 0)
+
+        # Run the current batch (dryrun == 'Test', otherwise a real import).
+        results = super().execute_import(fields, columns, options, dryrun=dryrun)
+
+        # 'skip'/'limit' would truncate _read_file's result, so strip them
+        # to obtain the total number of rows in the file.
+        clean_options = {k: v for k, v in options.items() if k not in ("skip", LIMIT_FIELD)}
+        file_length, _ = self._read_file(clean_options)
+
+        # On dryrun the base implementation returns no ids, and a clean test
+        # batch produces no messages either, so the result payload alone
+        # cannot tell us how many rows were processed.  Fall back to the
+        # batch window size clamped to the rows actually remaining.
+        processed_rows = len(results.get("ids") or [])
+        if not processed_rows:
+            processed_rows = max(min(limit, file_length - skip), 0)
+        has_more = processed_rows > 0 and (skip + processed_rows) < file_length
+        _logger.info(
+            "skip: %s, processed_rows: %s, file_length: %s, has_more: %s",
+            skip, processed_rows, file_length, has_more,
+        )
+
+        if has_more:
+            # Tell the client to run the next batch from the next offset.
+            results["next_action"] = {"options": {**options, "skip": skip + limit}}
+        elif dryrun:
+            # Final batch of a Test run: reset 'skip' so the client UI is
+            # ready to start the real import from row 0.
+            results["next_action"] = {"options": {**options, "skip": 0}}
+
+        return results
diff --git a/spp_registry_base/static/src/js/base_import_patch.js b/spp_registry_base/static/src/js/base_import_patch.js
new file mode 100644
--- /dev/null
+++ b/spp_registry_base/static/src/js/base_import_patch.js
@@ -0,0 +1,98 @@
+/** @odoo-module **/
+
+import {BaseImportModel} from "@base_import/import_model";
+
+/**
+ * Patched executeImport: shrinks the batch size of the final step so the
+ * remainder of the file is processed instead of a full (overshooting)
+ * batch, and reports progress in processed-record counts.
+ */
+BaseImportModel.prototype.executeImport = async function (isTest = false, totalSteps, importProgress) {
+    this.handleInterruption = false;
+    this._updateComments();
+    this.importMessages = [];
+
+    const startRow = this.importOptions.skip;
+    const importRes = {
+        ids: [],
+        fields: this.columns.map((e) => Boolean(e.fieldInfo) && e.fieldInfo.fieldPath),
+        columns: this.columns.map((e) => e.name.trim().toLowerCase()),
+        hasError: false,
+    };
+
+    // Derive the total record count from the step count and batch size
+    // supplied by the ImportAction component.
+    const batchSize = this.importOptions.limit || 2000;
+    const totalToImport =
+        (totalSteps - 1) * batchSize +
+        (this.importOptions.file_length % batchSize || batchSize);
+
+    // NOTE: exactly totalSteps iterations cover totalToImport records; an
+    // extra iteration would re-run a step after 'skip' was reset to 0.
+    for (let i = 1; i <= totalSteps; i++) {
+        if (this.handleInterruption) {
+            if (importRes.hasError || isTest) {
+                importRes.nextrow = startRow;
+                this.setOption("skip", startRow);
+            }
+            break;
+        }
+
+        // Last step processes only the remainder of the file; every other
+        // step uses the full batch size.
+        if (i === totalSteps && totalToImport % batchSize !== 0) {
+            this.importOptionsValues.limit.value = totalToImport % batchSize;
+        } else {
+            this.importOptionsValues.limit.value = batchSize;
+        }
+
+        const error = await this._executeImportStep(isTest, importRes);
+        if (error) {
+            const errorData = error.data || {};
+            const message =
+                (errorData.arguments && (errorData.arguments[1] || errorData.arguments[0])) ||
+                "An unknown issue occurred during import. Please retry or try to split the file.";
+
+            // Restore the configured batch size before bailing out.
+            this.importOptionsValues.limit.value = batchSize;
+
+            if (error.message) {
+                this._addMessage("danger", [error.message, message]);
+            } else {
+                this._addMessage("danger", [message]);
+            }
+
+            importRes.hasError = true;
+            break;
+        }
+
+        if (importProgress) {
+            importProgress.step = i;
+            // Progress is based on records processed so far, not on steps,
+            // so the shorter final batch does not distort the bar.
+            const processedCount = Math.min(i * batchSize, totalToImport);
+            importProgress.value = Math.round((100 * processedCount) / totalToImport);
+        }
+
+        if (i === totalSteps) {
+            // All batches done: rewind so a follow-up run starts from the top.
+            importRes.nextrow = startRow;
+            this.setOption("skip", 0);
+        }
+    }
+
+    // Restore the configured batch size after completion.
+    this.importOptionsValues.limit.value = batchSize;
+
+    if (!importRes.hasError) {
+        if (importRes.nextrow) {
+            this._addMessage("warning", [
+                `Click 'Resume' to proceed with the import, resuming at line ${importRes.nextrow + 1}.`,
+                "You can test or reload your file before resuming the import.",
+            ]);
+        }
+        if (isTest) {
+            this._addMessage("info", ["Everything seems valid."]);
+        }
+    } else {
+        importRes.nextrow = startRow;
+    }
+    return {res: importRes};
+};