10
10
import re
11
11
import os
12
12
from functools import partial
13
+ import multiprocessing
13
14
14
15
try :
15
16
# py 311 adds this library natively
38
39
del_outdated_generated_files ,
39
40
)
40
41
from .conf import CONF_NAME
42
+ from .package_utils import create_package , change_log_generate , extract_breaking_change , get_version_info , check_file
41
43
42
44
logging .basicConfig (
43
45
stream = sys .stdout ,
47
49
_LOGGER = logging .getLogger (__name__ )
48
50
49
51
52
def execute_func_with_timeout(func, timeout: int = 900) -> Any:
    """Run *func* in a single-worker process pool, bounded by *timeout* seconds.

    :param func: picklable zero-argument callable, executed in a child process.
    :param timeout: maximum seconds to wait for the result (default 900).
    :returns: whatever ``func`` returns.
    :raises multiprocessing.TimeoutError: if the result is not ready within
        ``timeout`` seconds (callers of this helper catch this explicitly).
    """
    # Use the pool as a context manager so the worker process is always
    # terminated, even when the call times out or raises.  The original
    # version leaked the Pool object and, on timeout, left the child
    # process running indefinitely.
    with multiprocessing.Pool(processes=1) as pool:
        return pool.apply_async(func).get(timeout)
55
+
56
+
50
57
def is_multiapi_package (python_md_content : List [str ]) -> bool :
51
58
for line in python_md_content :
52
59
if re .findall (r"\s*multiapi\s*:\s*true" , line ):
@@ -258,7 +265,7 @@ def main(generate_input, generate_output):
258
265
config = gen_dpg (readme_or_tsp , data .get ("autorestConfig" , "" ), dpg_relative_folder (spec_folder ))
259
266
_LOGGER .info (f"code generation cost time: { int (time .time () - code_generation_start_time )} seconds" )
260
267
except Exception as e :
261
- _LOGGER .error (f"fail to generate sdk for { readme_or_tsp } : { str (e )} " )
268
+ _LOGGER .error (f"Fail to generate sdk for { readme_or_tsp } : { str (e )} " )
262
269
for hint_message in [
263
270
"======================================= Whant Can I do (begin) ========================================================================" ,
264
271
f"Fail to generate sdk for { readme_or_tsp } . If you are from service team, please first check if the failure happens only to Python automation, or for all SDK automations. " ,
@@ -293,43 +300,157 @@ def main(generate_input, generate_output):
293
300
package_entry ["isMultiapi" ] = is_multiapi_package (readme_python_content )
294
301
package_entry ["targetReleaseDate" ] = data .get ("targetReleaseDate" , "" )
295
302
package_entry ["allowInvalidNextVersion" ] = data .get ("allowInvalidNextVersion" , False )
296
- package_entry ["runInPipeline" ] = run_in_pipeline
297
303
result [package_name ] = package_entry
298
304
else :
299
305
result [package_name ]["path" ].append (folder_name )
300
306
result [package_name ][spec_word ].append (readme_or_tsp )
307
+ except Exception as e :
308
+ _LOGGER .error (f"Fail to process package { package_name } in { readme_or_tsp } : { str (e )} " )
309
+ continue
301
310
302
- # Generate some necessary file for new service
311
+ # Generate some necessary file for new service
312
+ try :
303
313
init_new_service (package_name , folder_name )
314
+ except Exception as e :
315
+ _LOGGER .warning (f"Fail to init new service { package_name } in { readme_or_tsp } : { str (e )} " )
316
+
317
+ # format samples and tests
318
+ try :
304
319
format_samples_and_tests (sdk_code_path )
320
+ except Exception as e :
321
+ _LOGGER .warning (f"Fail to format samples and tests for { package_name } in { readme_or_tsp } : { str (e )} " )
305
322
306
- # Update metadata
307
- try :
308
- update_servicemetadata (
309
- sdk_folder ,
310
- data ,
311
- config ,
312
- folder_name ,
313
- package_name ,
314
- spec_folder ,
315
- readme_or_tsp ,
316
- )
317
- except Exception as e :
318
- _LOGGER .error (f"fail to update meta: { str (e )} " )
323
+ # Update metadata
324
+ try :
325
+ update_servicemetadata (
326
+ sdk_folder ,
327
+ data ,
328
+ config ,
329
+ folder_name ,
330
+ package_name ,
331
+ spec_folder ,
332
+ readme_or_tsp ,
333
+ )
334
+ except Exception as e :
335
+ _LOGGER .warning (f"Fail to update meta: { str (e )} " )
319
336
320
- # Setup package locally
337
+ # Setup package locally
338
+ try :
321
339
check_call (
322
340
f"pip install --ignore-requires-python -e { sdk_code_path } " ,
323
341
shell = True ,
324
342
)
343
+ except Exception as e :
344
+ _LOGGER .warning (f"Fail to setup package { package_name } in { readme_or_tsp } : { str (e )} " )
345
+
346
+ # check whether multiapi package has only one api-version in per subfolder
347
+ try :
348
+ if result [package_name ]["isMultiapi" ]:
349
+ check_api_version_in_subfolder (sdk_code_path )
350
+ except Exception as e :
351
+ _LOGGER .warning (
352
+ f"Fail to check api version in subfolder for { package_name } in { readme_or_tsp } : { str (e )} "
353
+ )
354
+
355
+ # Changelog generation
356
+ try :
357
+ last_version , last_stable_release = get_version_info (package_name , result [package_name ]["tagIsStable" ])
358
+ change_log_func = partial (
359
+ change_log_generate ,
360
+ package_name ,
361
+ last_version ,
362
+ result [package_name ]["tagIsStable" ],
363
+ last_stable_release = last_stable_release ,
364
+ prefolder = folder_name ,
365
+ is_multiapi = result [package_name ]["isMultiapi" ],
366
+ )
367
+
368
+ changelog_generation_start_time = time .time ()
369
+ try :
370
+ md_output = execute_func_with_timeout (change_log_func )
371
+ except multiprocessing .TimeoutError :
372
+ md_output = "change log generation was timeout!!! You need to write it manually!!!"
373
+ except :
374
+ md_output = "change log generation failed!!! You need to write it manually!!!"
375
+ finally :
376
+ for file in ["stable.json" , "current.json" ]:
377
+ file_path = Path (sdk_folder , folder_name , package_name , file )
378
+ if file_path .exists ():
379
+ os .remove (file_path )
380
+ _LOGGER .info (f"Remove { file_path } which is temp file to generate changelog." )
381
+
382
+ _LOGGER .info (
383
+ f"changelog generation cost time: { int (time .time () - changelog_generation_start_time )} seconds"
384
+ )
385
+ result [package_name ]["changelog" ] = {
386
+ "content" : md_output ,
387
+ "hasBreakingChange" : "Breaking Changes" in md_output ,
388
+ "breakingChangeItems" : extract_breaking_change (md_output ),
389
+ }
390
+ result [package_name ]["version" ] = last_version
391
+
392
+ _LOGGER .info (f"[PACKAGE]({ package_name } )[CHANGELOG]:{ md_output } " )
393
+ except Exception as e :
394
+ _LOGGER .warning (f"Fail to generate changelog for { package_name } in { readme_or_tsp } : { str (e )} " )
395
+
396
+ # Generate ApiView
397
+ if run_in_pipeline :
398
+ apiview_start_time = time .time ()
399
+ try :
400
+ package_path = Path (sdk_folder , folder_name , package_name )
401
+ check_call (
402
+ [
403
+ "python" ,
404
+ "-m" ,
405
+ "pip" ,
406
+ "install" ,
407
+ "-r" ,
408
+ "../../../eng/apiview_reqs.txt" ,
409
+ "--index-url=https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi"
410
+ "/simple/" ,
411
+ ],
412
+ cwd = package_path ,
413
+ timeout = 600 ,
414
+ )
415
+ check_call (["apistubgen" , "--pkg-path" , "." ], cwd = package_path , timeout = 600 )
416
+ for file in os .listdir (package_path ):
417
+ if "_python.json" in file and package_name in file :
418
+ result [package_name ]["apiViewArtifact" ] = str (Path (package_path , file ))
419
+ except Exception as e :
420
+ _LOGGER .debug (f"Fail to generate ApiView token file for { package_name } : { e } " )
421
+ _LOGGER .info (f"apiview generation cost time: { int (time .time () - apiview_start_time )} seconds" )
422
+ else :
423
+ _LOGGER .info ("Skip ApiView generation for package that does not run in pipeline." )
325
424
326
- # check whether multiapi package has only one api-version in per subfolder
327
- check_api_version_in_subfolder (sdk_code_path )
425
+ # check generated files and update package["version"]
426
+ if package_name .startswith ("azure-mgmt-" ):
427
+ try :
428
+ check_file (result [package_name ])
429
+ except Exception as e :
430
+ _LOGGER .warning (f"Fail to check generated files for { package_name } : { e } " )
328
431
329
- # could be removed in the short future
330
- result [package_name ]["afterMultiapiCombiner" ] = False
432
+ # Build artifacts for package
433
+ try :
434
+ create_package (result [package_name ]["path" ][0 ], package_name )
435
+ dist_path = Path (sdk_folder , folder_name , package_name , "dist" )
436
+ result [package_name ]["artifacts" ] = [
437
+ str (dist_path / package_file ) for package_file in os .listdir (dist_path )
438
+ ]
439
+ for artifact in result [package_name ]["artifacts" ]:
440
+ if ".whl" in artifact :
441
+ result [package_name ]["language" ] = "Python"
442
+ break
443
+ _LOGGER .info (f"Built package { package_name } successfully." )
331
444
except Exception as e :
332
- _LOGGER .error (f"fail to setup package: { str (e )} " )
445
+ _LOGGER .warning (f"Fail to build package { package_name } in { readme_or_tsp } : { str (e )} " )
446
+
447
+ # update result
448
+ result [package_name ]["installInstructions" ] = {
449
+ "full" : "You can use pip to install the artifacts." ,
450
+ "lite" : f"pip install { package_name } " ,
451
+ }
452
+ result [package_name ]["result" ] = "succeeded"
453
+ result [package_name ]["packageFolder" ] = result [package_name ]["path" ][0 ]
333
454
334
455
# remove duplicates
335
456
try :
@@ -340,13 +461,23 @@ def main(generate_input, generate_output):
340
461
if value .get ("readmeMd" ):
341
462
value ["readmeMd" ] = list (set (value ["readmeMd" ]))
342
463
except Exception as e :
343
- _LOGGER .error (f"fail to remove duplicates: { str (e )} " )
464
+ _LOGGER .warning (f"Fail to remove duplicates: { str (e )} " )
344
465
345
466
if len (result ) == 0 and len (readme_and_tsp ) > 1 :
346
467
raise Exception ("No package is generated, please check the log for details" )
347
468
469
+ if len (result ) == 0 :
470
+ _LOGGER .info ("No packages to process, returning empty result" )
471
+ else :
472
+ _LOGGER .info (f"Processing { len (result )} generated packages..." )
473
+
474
+ final_result = {"packages" : list (result .values ())}
348
475
with open (generate_output , "w" ) as writer :
349
- json .dump (result , writer )
476
+ json .dump (final_result , writer , indent = 2 )
477
+
478
+ _LOGGER .info (
479
+ f"Congratulations! Succeed to build package for { [p ['packageName' ] for p in final_result ['packages' ]]} . And you shall be able to see the generated code when running 'git status'."
480
+ )
350
481
351
482
352
483
def generate_main ():
0 commit comments