 from .errors import ResponseError, EntryCreatedError, OperationCompletionError

+POST_LOGBATCH_RETRY_COUNT = 10

 logger = logging.getLogger(__name__)
 logger.addHandler(logging.NullHandler())

@@ -165,6 +166,7 @@ def __init__(self,
                  endpoint,
                  project,
                  token,
+                 log_batch_size=20,
                  is_skipped_an_issue=True,
                  verify_ssl=True,
                  retries=None,
@@ -175,12 +177,15 @@ def __init__(self,
         endpoint: endpoint of report portal service.
         project: project name to use for launch names.
         token: authorization token.
+        log_batch_size: option to set the maximum number of logs
+            that can be processed in one batch
         is_skipped_an_issue: option to mark skipped tests as not
             'To Investigate' items on Server side.
         verify_ssl: option to not verify ssl certificates
         """
-        super(ReportPortalService, self).__init__()
+        self._batch_logs = []
         self.endpoint = endpoint
+        self.log_batch_size = log_batch_size
         self.project = project
         self.token = token
         self.is_skipped_an_issue = is_skipped_an_issue
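
The new keyword defaults to 20 entries, so existing callers keep working unchanged. A minimal construction sketch, assuming the class is importable as reportportal_client.service.ReportPortalService; the endpoint, project and token values are placeholders, not part of this diff:

# A minimal sketch, not the project's official example: assumes the class is
# importable as reportportal_client.service.ReportPortalService and that the
# endpoint/project/token values below stand in for a real server.
from reportportal_client.service import ReportPortalService

service = ReportPortalService(
    endpoint="http://localhost:8080",
    project="default_personal",
    token="00000000-0000-0000-0000-000000000000",
    log_batch_size=20,  # buffer up to 20 log entries before a single POST
)
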
@@ -231,6 +236,9 @@ def finish_launch(self, end_time, status=None, **kwargs):
         Status can be one of the followings:
         (PASSED, FAILED, STOPPED, SKIPPED, RESETED, CANCELLED)
         """
+        # process any pending log batches first:
+        if self._batch_logs:
+            self.log_batch([], force=True)
         data = {
             "endTime": end_time,
             "status": status
@@ -395,7 +403,7 @@ def log(self, time, message, level=None, attachment=None, item_id=None):
         logger.debug("log - ID: %s", item_id)
         return _get_id(r)

-    def log_batch(self, log_data, item_id=None):
+    def log_batch(self, log_data, item_id=None, force=False):
         """
         Log batch of messages with attachment.

@@ -407,11 +415,17 @@ def log_batch(self, log_data, item_id=None):
                 name: name of attachment
                 data: fileobj or content
                 mime: content type for attachment
+        item_id: UUID of the test item that owns log_data
+        force: flag that forces the client to process all the logs
+            stored in self._batch_logs immediately
         """
+        self._batch_logs += log_data
+        if len(self._batch_logs) < self.log_batch_size and not force:
+            return
         url = uri_join(self.base_url_v2, "log")

         attachments = []
-        for log_item in log_data:
+        for log_item in self._batch_logs:
             if item_id:
                 log_item["itemUuid"] = item_id
             log_item["launchUuid"] = self.launch_id
@@ -435,30 +449,27 @@ def log_batch(self, log_data, item_id=None):
         files = [(
             "json_request_part", (
                 None,
-                json.dumps(log_data),
+                json.dumps(self._batch_logs),
                 "application/json"
             )
         )]
         files.extend(attachments)
-        from reportportal_client import POST_LOGBATCH_RETRY_COUNT
         for i in range(POST_LOGBATCH_RETRY_COUNT):
             try:
                 r = self.session.post(
                     url=url,
                     files=files,
                     verify=self.verify_ssl
                 )
+                logger.debug("log_batch - ID: %s", item_id)
+                logger.debug("log_batch response: %s", r.text)
+                self._batch_logs = []
+                return _get_data(r)
             except KeyError:
                 if i < POST_LOGBATCH_RETRY_COUNT - 1:
                     continue
                 else:
                     raise
-            break
-
-        logger.debug("log_batch - ID: %s", item_id)
-        logger.debug("log_batch response: %s", r.text)
-
-        return _get_data(r)

     @staticmethod
     def get_system_information(agent_name='agent_name'):
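
Taken together, the change means repeated log_batch calls only hit the /log endpoint once log_batch_size entries have accumulated, or when force=True is passed. A usage sketch, assuming each log entry is a plain dict with time, message and level keys, and that item_id is the UUID of an already-started test item; the local timestamp helper is illustrative, not part of this diff:

import time

def timestamp():
    # ReportPortal expects epoch time in milliseconds, as a string
    return str(int(time.time() * 1000))

# Each call below only appends to service._batch_logs; the HTTP POST is issued
# once log_batch_size entries have accumulated or force=True is passed.
for step in range(50):
    service.log_batch(
        [{"time": timestamp(),
          "message": "step %d done" % step,
          "level": "INFO"}],
        item_id=item_id,  # placeholder: UUID of an already-started test item
    )

# Flush the remainder explicitly (finish_launch now does this automatically).
service.log_batch([], force=True)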