11# Copyright 2013-2019 Camptocamp SA
22# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
33import codecs
4- import hashlib
54import logging
6- import struct
75from itertools import groupby
86
97from odoo import api , exceptions , fields , models
108from odoo .tools .safe_eval import safe_eval
119
10+ from odoo .addons .queue_job .delay import chain
11+ from odoo .addons .queue_job .delay import group as job_group
12+
1213from ..pdf_utils import assemble_pdf
1314from ..zpl_utils import assemble_zpl2 , assemble_zpl2_single_images
1415
@@ -48,6 +49,8 @@ def _get_packs(self, batch):
4849 operations , key = lambda r : r .result_package_id or r .package_id
4950 ):
5051 pack_label = self ._find_pack_label (pack )
52+ # TODO we should maybe try: pack_labels = _find_pack_labels(pack)
53+ # then iterate on pack_labels
5154 yield (
5255 pack ,
5356 self .env ["stock.move.line" ].browse ([o .id for o in grp_operations ]),
@@ -66,21 +69,14 @@ def _do_generate_labels(self, group):
6669 too many threads
6770 """
6871 self .ensure_one ()
69-
72+ jobs = []
7073 for pack , picking , _label in group :
71- try :
72- picking .send_to_shipper ()
73- except Exception as e :
74- # add information on picking and pack in the exception
75- picking_name = self .env ._ (f"Picking: { picking .name } " )
76- pack_name = pack .name if pack else ""
77- pack_num = self .env ._ (f"Pack: { pack_name } " )
78- # pylint: disable=translation-required
79- msg = f"{ picking_name } { pack_num } - { str (e )} "
80- _logger .exception (msg )
81- raise exceptions .UserError (msg ) from e
82-
83- def _get_all_files (self , batch ):
74+ _logger .debug ("Generating label for pack %s" , pack .name )
75+ job = picking .delayable ().send_to_shipper ()
76+ jobs .append (job )
77+ return jobs
78+
79+ def _generate_all_labels (self , batch ):
8480 self .ensure_one ()
8581
8682 # If we have more than one pack in a picking, we must ensure
@@ -94,17 +90,59 @@ def _get_all_files(self, batch):
9490 picking = operations [0 ].picking_id
9591 groups .setdefault (picking .id , []).append ((pack , picking , label ))
9692
93+ jobs = []
9794 for group in groups .values ():
98- self ._do_generate_labels (group )
95+ jobs .extend (self ._do_generate_labels (group ))
96+ return jobs
9997
100- labels = []
101- for pack , _operations , label in self ._get_packs (batch ):
102- label = self ._find_pack_label (pack )
103- if not label :
def generate_pdf_summary(self, batch):
    """Create label attachments on ``batch`` from its packs' labels.

    For each pack of ``batch`` that has a label (``_find_pack_label``),
    the label data is collected and grouped by file type, then stored as
    ``ir.attachment`` records linked to the ``stock.picking.batch``:

    * ``zpl2`` labels are stored as one attachment per label when the
      ``zpl2.batch.merge`` config parameter is falsy, because a merged
      file that is too big can fail on Zebra printers;
    * every other file type (and merged zpl2) is concatenated with
      ``_concat_files`` into a single attachment; file types whose
      merging is unsupported are skipped.
    """
    self.ensure_one()

    # Default to "False": get_param() returns the boolean False when the
    # parameter is unset, and safe_eval() requires a string.
    zpl2_batch_merge = safe_eval(
        self.env["ir.config_parameter"].get_param("zpl2.batch.merge", "False")
    )

    labels = []
    for pack, _operations, _label in self._get_packs(batch):
        label = self._find_pack_label(pack)
        if not label:
            continue
        label_name = pack.parcel_tracking or pack.name
        labels.append((label.file_type, label.attachment_id.datas, label_name))

    labels_by_f_type = self._group_labels_by_file_type(labels)
    # NOTE: `type_labels` (not `labels`) to avoid shadowing the list above.
    for f_type, type_labels in labels_by_f_type.items():
        if f_type == "zpl2" and not zpl2_batch_merge:
            # We do not want to merge zpl2 labels because a file that is
            # too big can fail on Zebra printers.
            for label in type_labels:
                f_name = label["name"]
                filename = f"{f_name}.{f_type}"
                data = {
                    "name": filename,
                    "res_id": batch.id,
                    "res_model": "stock.picking.batch",
                    "datas": label["data"],
                }
                self.env["ir.attachment"].create(data)
        else:
            labels_bin = [
                codecs.decode(label["data"], "base64")
                for label in type_labels
                if label
            ]
            filename = batch.name + "." + f_type

            filedata = self._concat_files(f_type, labels_bin)
            if not filedata:
                # Merging of `f_type` not supported, so we cannot
                # create the attachment
                continue
            data = {
                "name": filename,
                "res_id": batch.id,
                "res_model": "stock.picking.batch",
                "datas": codecs.encode(filedata, "base64"),
            }
            self.env["ir.attachment"].create(data)
108146
109147 def _check_pickings (self ):
110148 """Check pickings have at least one pack"""
@@ -130,24 +168,8 @@ def action_generate_labels(self):
130168 """
131169 self .ensure_one ()
132170
133- hasher = hashlib .sha1 (str (self .id ).encode ())
134- # pg_lock accepts an int8 so we build an hash composed with
135- # contextual information and we throw away some bits
136- int_lock = struct .unpack ("q" , hasher .digest ()[:8 ])
137-
138- self .env .cr .execute ("SELECT pg_try_advisory_xact_lock(%s);" , (int_lock ,))
139- acquired = self .env .cr .fetchone ()[0 ]
140- if not acquired :
141- raise exceptions .UserError (
142- self .env ._ (
143- "Another label generation process is already "
144- "running. Please try again later."
145- )
146- )
171+ # TODO ensure there is no pending jobs for those batches
147172
148- zpl2_batch_merge = safe_eval (
149- self .env ["ir.config_parameter" ].get_param ("zpl2.batch.merge" )
150- )
151173 if not self .batch_ids :
152174 raise exceptions .UserError (self .env ._ ("No picking batch selected" ))
153175
@@ -160,7 +182,13 @@ def action_generate_labels(self):
160182 self ._check_pickings ()
161183
162184 to_generate = self .batch_ids
163- if not self .generate_new_labels :
185+
186+ job_groups = []
187+
188+ if self .generate_new_labels :
189+ job_purge = to_generate .delayable ().purge_tracking_references ()
190+ job_groups .append (job_group (job_purge ))
191+ else :
164192 already_generated_ids = (
165193 self .env ["ir.attachment" ]
166194 .search (
@@ -174,54 +202,69 @@ def action_generate_labels(self):
174202 to_generate = to_generate .filtered (
175203 lambda rec : rec .id not in already_generated_ids
176204 )
177- else :
178- to_generate .purge_tracking_references ()
205+
206+ if not to_generate :
207+ raise exceptions .UserError (
208+ self .env ._ ("No labels to generate for the selected batches." )
209+ )
210+
211+ batch_generate = self .env ["queue.job.batch" ].get_new_batch (
212+ "Generate labels for pickings"
213+ )
214+ batch_summary = self .env ["queue.job.batch" ].get_new_batch (
215+ "Generate summary PDFs"
216+ )
217+
218+ job_summaries = []
179219
180220 for batch in to_generate :
181- labels = self ._get_all_files (batch )
182- labels_by_f_type = self ._group_labels_by_file_type (labels )
183- for f_type , labels in labels_by_f_type .items ():
184- if f_type == "zpl2" and not zpl2_batch_merge :
185- # We do not want to merge zpl2
186- # because too big file can failed on zebra printers
187- for label in labels :
188- f_name = label ["name" ]
189- filename = f"{ f_name } .{ f_type } "
190- data = {
191- "name" : filename ,
192- "res_id" : batch .id ,
193- "res_model" : "stock.picking.batch" ,
194- "datas" : label ["data" ],
195- }
196- self .env ["ir.attachment" ].create (data )
197- else :
198- labels_bin = [
199- codecs .decode (label ["data" ], "base64" )
200- for label in labels
201- if label
202- ]
203- filename = batch .name + "." + f_type
221+ jobs = self .with_context (job_batch = batch_generate )._generate_all_labels (
222+ batch
223+ )
224+ job_groups .append (job_group (* jobs ))
204225
205- filedata = self ._concat_files (f_type , labels_bin )
206- if not filedata :
207- # Merging of `f_type` not supported, so we cannot
208- # create the attachment
209- continue
210- data = {
211- "name" : filename ,
212- "res_id" : batch .id ,
213- "res_model" : "stock.picking.batch" ,
214- "datas" : codecs .encode (filedata , "base64" ),
215- }
216- self .env ["ir.attachment" ].create (data )
226+ job_summary = (
227+ self .with_context (job_batch = batch_summary )
228+ .delayable ()
229+ .generate_pdf_summary (batch )
230+ )
231+ job_groups .append (job_group (job_summary ))
232+ job_summaries .append (job_summary )
233+
234+ # Commit the transaction so that we can retrieve job_ids and delay them
235+ self .env .cr .commit () # pylint: disable=E8102
217236
218- return {
219- "type" : "ir.actions.act_window_close" ,
220- }
237+ chainnable = chain (* job_groups )
238+ chainnable .delay ()
239+
240+ job_ids = batch_summary .job_ids .ids
241+
242+ if len (job_ids ) <= 1 :
243+ return {
244+ "type" : "ir.actions.act_window" ,
245+ "name" : "Job Detail" ,
246+ "res_model" : "queue.job" ,
247+ "view_mode" : "form" ,
248+ "res_id" : job_ids [0 ],
249+ "context" : self .env .context ,
250+ "target" : "new" ,
251+ }
252+ else :
253+ return {
254+ "type" : "ir.actions.act_window" ,
255+ "name" : "Summary Jobs" ,
256+ "res_model" : "queue.job" ,
257+ "view_mode" : "list" ,
258+ "domain" : [("id" , "in" , job_ids )],
259+ "context" : self .env .context ,
260+ "target" : "new" ,
261+ }
221262
222263 @api .model
223264 def _group_labels_by_file_type (self , labels ):
224265 res = {}
266+ _logger .debug ("Grouping %d labels by file type" , len (labels ))
267+ _logger .debug ("Labels: %s" , labels )
225268 for f_type , label , label_name in labels :
226269 res .setdefault (f_type , [])
227270 res [f_type ].append ({"data" : label , "name" : label_name })
0 commit comments