Skip to content

Commit 0e9354e

Browse files
msyyc and Copilot authored
[sdk generation pipeline] simplify generation scripts (#41708)
* Initial plan for issue * Merge sdk_package functionality into sdk_generator Co-authored-by: msyyc <[email protected]> * Fix indentation and improve edge case handling for empty results Co-authored-by: msyyc <[email protected]> * Delete sdk_package.py and remove entry point as functionality is now integrated into sdk_generator.py Co-authored-by: msyyc <[email protected]> * Optimize flow by integrating packaging directly after generation Co-authored-by: msyyc <[email protected]> * remove afterMultiapiCombiner * update * update * update * update * update instructions * Update main.py --------- Co-authored-by: copilot-swe-agent[bot] <[email protected]>
1 parent 051c6e3 commit 0e9354e

File tree

6 files changed

+165
-248
lines changed

6 files changed

+165
-248
lines changed

scripts/auto_release/main.py

Lines changed: 2 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,6 @@ def __init__(self):
8989
self.pr_number = 0
9090
self.tag_is_stable = False
9191
self.has_test = False
92-
self.check_package_size_result = [] # List[str]
9392
self.version_suggestion = "" # if can't calculate next version, give a suggestion
9493

9594
@property
@@ -165,11 +164,9 @@ def generate_code(self):
165164
print_check(f"python -m packaging_tools.sdk_generator {self.autorest_result} {self.autorest_result}")
166165

167166
generate_result = self.get_autorest_result()
168-
self.tag_is_stable = list(generate_result.values())[0]["tagIsStable"]
167+
self.tag_is_stable = generate_result["packages"][0]["tagIsStable"]
169168
log(f"tag_is_stable is {self.tag_is_stable}")
170169

171-
print_check(f"python -m packaging_tools.sdk_package {self.autorest_result} {self.autorest_result}")
172-
173170
def get_package_name_with_autorest_result(self):
174171
generate_result = self.get_autorest_result()
175172
self.whole_package_name = generate_result["packages"][0]["packageName"]
@@ -210,15 +207,6 @@ def get_last_release_version(self) -> str:
210207
except:
211208
return ""
212209

213-
def check_package_size(self):
214-
if self.after_multiapi_combiner:
215-
packages = self.get_private_package()
216-
for package in packages:
217-
if os.path.getsize(package) > 2 * 1024 * 1024:
218-
self.check_package_size_result.append(
219-
f"ERROR: Package size is over 2MBytes: {Path(package).name}!!!"
220-
)
221-
222210
def check_model_flatten(self):
223211
if self.whole_package_name in [
224212
"azure-mgmt-mysqlflexibleservers",
@@ -256,7 +244,6 @@ def check_model_flatten(self):
256244
raise Exception(message)
257245

258246
def check_file(self):
259-
self.check_package_size()
260247
self.check_model_flatten()
261248

262249
def sdk_code_path(self) -> str:
@@ -333,8 +320,7 @@ def create_pr_proc(self):
333320
pr_title = "[AutoRelease] {}(can only be merged by SDK owner)".format(self.new_branch)
334321
pr_head = "{}:{}".format(os.getenv("USR_NAME"), self.new_branch)
335322
pr_base = "main"
336-
pr_body = "" if not self.check_package_size_result else "{}\n".format("\n".join(self.check_package_size_result))
337-
pr_body = pr_body + "{} \n{} \n{}".format(self.issue_link, self.test_result, self.pipeline_link)
323+
pr_body = "{} \n{} \n{}".format(self.issue_link, self.test_result, self.pipeline_link)
338324
if self.has_multi_packages:
339325
pr_body += f"\nBuildTargetingString\n {self.whole_package_name}\nSkip.CreateApiReview"
340326
res_create = api.pulls.create(pr_title, pr_head, pr_base, pr_body)
@@ -351,15 +337,6 @@ def zero_version_policy(self):
351337
issue_number = int(self.issue_link.split("/")[-1])
352338
api_request.issues.add_labels(issue_number=issue_number, labels=["base-branch-attention"])
353339

354-
@property
355-
def after_multiapi_combiner(self) -> bool:
356-
content = self.get_autorest_result()
357-
return content["packages"][0]["afterMultiapiCombiner"]
358-
359-
def get_private_package(self) -> List[str]:
360-
content = self.get_autorest_result()
361-
return content["packages"][0]["artifacts"]
362-
363340
def ask_check_policy(self):
364341
changelog = self.get_changelog()
365342
if changelog == "":

scripts/automation_generate.sh

Lines changed: 4 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,10 @@
11
#!/bin/bash
22

33
TEMP_FILE="$TMPDIR/auto_temp.json"
4-
# generate code
5-
python -m packaging_tools.sdk_generator "$1" "$TEMP_FILE" --debug 2>&1
6-
echo "[Generate] codegen done!!!"
7-
if [ ! -f "$TEMP_FILE" ]; then
8-
echo "[Autorest]$TEMP_FILE does not exist!!!Error happened during codegen"
9-
exit 1
10-
fi
11-
12-
if [ -f "$2" ]; then
13-
rm "$2"
14-
fi
15-
16-
# package
17-
python -m packaging_tools.sdk_package "$TEMP_FILE" "$2" --debug 2>&1
18-
echo "[Generate] generate done!!!"
4+
# generate code and package in one step
5+
python -m packaging_tools.sdk_generator "$1" "$2" --debug 2>&1
6+
echo "[Generate] generation and packaging done!!!"
197
if [ ! -f "$2" ]; then
20-
echo "[Autorest]$2 does not exist!!!Error happened during package"
8+
echo "[Autorest]$2 does not exist!!!Error happened during generation"
219
exit 1
2210
fi

scripts/sdk_generate.sh

Lines changed: 4 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -15,18 +15,10 @@ echo "$PATH"
1515
export PATH="/usr/local/n/versions/node/18.19.0/bin:$PATH"
1616

1717
TEMP_FILE="$TMPDIR/venv-sdk/auto_temp.json"
18-
# generate code
19-
python -m packaging_tools.sdk_generator "$1" "$TEMP_FILE" 2>&1
20-
echo "[Generate] codegen done!!!"
21-
if [ ! -f "$TEMP_FILE" ]; then
22-
echo "[Autorest]$TEMP_FILE does not exist!!!Error happened during codegen"
23-
exit 1
24-
fi
25-
26-
# package
27-
python -m packaging_tools.sdk_package "$TEMP_FILE" "$2" 2>&1
28-
echo "[Generate] generate done!!!"
18+
# generate code and package in one step
19+
python -m packaging_tools.sdk_generator "$1" "$2" 2>&1
20+
echo "[Generate] generation and packaging done!!!"
2921
if [ ! -f "$2" ]; then
30-
echo "[Autorest]$2 does not exist!!!Error happened during package"
22+
echo "[Autorest]$2 does not exist!!!Error happened during generation"
3123
exit 1
3224
fi

tools/azure-sdk-tools/packaging_tools/sdk_generator.py

Lines changed: 155 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
import re
1111
import os
1212
from functools import partial
13+
import multiprocessing
1314

1415
try:
1516
# py 311 adds this library natively
@@ -38,6 +39,7 @@
3839
del_outdated_generated_files,
3940
)
4041
from .conf import CONF_NAME
42+
from .package_utils import create_package, change_log_generate, extract_breaking_change, get_version_info, check_file
4143

4244
logging.basicConfig(
4345
stream=sys.stdout,
@@ -47,6 +49,11 @@
4749
_LOGGER = logging.getLogger(__name__)
4850

4951

52+
def execute_func_with_timeout(func, timeout: int = 900) -> Any:
53+
"""Execute function with timeout"""
54+
return multiprocessing.Pool(processes=1).apply_async(func).get(timeout)
55+
56+
5057
def is_multiapi_package(python_md_content: List[str]) -> bool:
5158
for line in python_md_content:
5259
if re.findall(r"\s*multiapi\s*:\s*true", line):
@@ -258,7 +265,7 @@ def main(generate_input, generate_output):
258265
config = gen_dpg(readme_or_tsp, data.get("autorestConfig", ""), dpg_relative_folder(spec_folder))
259266
_LOGGER.info(f"code generation cost time: {int(time.time() - code_generation_start_time)} seconds")
260267
except Exception as e:
261-
_LOGGER.error(f"fail to generate sdk for {readme_or_tsp}: {str(e)}")
268+
_LOGGER.error(f"Fail to generate sdk for {readme_or_tsp}: {str(e)}")
262269
for hint_message in [
263270
"======================================= Whant Can I do (begin) ========================================================================",
264271
f"Fail to generate sdk for {readme_or_tsp}. If you are from service team, please first check if the failure happens only to Python automation, or for all SDK automations. ",
@@ -293,43 +300,157 @@ def main(generate_input, generate_output):
293300
package_entry["isMultiapi"] = is_multiapi_package(readme_python_content)
294301
package_entry["targetReleaseDate"] = data.get("targetReleaseDate", "")
295302
package_entry["allowInvalidNextVersion"] = data.get("allowInvalidNextVersion", False)
296-
package_entry["runInPipeline"] = run_in_pipeline
297303
result[package_name] = package_entry
298304
else:
299305
result[package_name]["path"].append(folder_name)
300306
result[package_name][spec_word].append(readme_or_tsp)
307+
except Exception as e:
308+
_LOGGER.error(f"Fail to process package {package_name} in {readme_or_tsp}: {str(e)}")
309+
continue
301310

302-
# Generate some necessary file for new service
311+
# Generate some necessary file for new service
312+
try:
303313
init_new_service(package_name, folder_name)
314+
except Exception as e:
315+
_LOGGER.warning(f"Fail to init new service {package_name} in {readme_or_tsp}: {str(e)}")
316+
317+
# format samples and tests
318+
try:
304319
format_samples_and_tests(sdk_code_path)
320+
except Exception as e:
321+
_LOGGER.warning(f"Fail to format samples and tests for {package_name} in {readme_or_tsp}: {str(e)}")
305322

306-
# Update metadata
307-
try:
308-
update_servicemetadata(
309-
sdk_folder,
310-
data,
311-
config,
312-
folder_name,
313-
package_name,
314-
spec_folder,
315-
readme_or_tsp,
316-
)
317-
except Exception as e:
318-
_LOGGER.error(f"fail to update meta: {str(e)}")
323+
# Update metadata
324+
try:
325+
update_servicemetadata(
326+
sdk_folder,
327+
data,
328+
config,
329+
folder_name,
330+
package_name,
331+
spec_folder,
332+
readme_or_tsp,
333+
)
334+
except Exception as e:
335+
_LOGGER.warning(f"Fail to update meta: {str(e)}")
319336

320-
# Setup package locally
337+
# Setup package locally
338+
try:
321339
check_call(
322340
f"pip install --ignore-requires-python -e {sdk_code_path}",
323341
shell=True,
324342
)
343+
except Exception as e:
344+
_LOGGER.warning(f"Fail to setup package {package_name} in {readme_or_tsp}: {str(e)}")
345+
346+
# check whether multiapi package has only one api-version in per subfolder
347+
try:
348+
if result[package_name]["isMultiapi"]:
349+
check_api_version_in_subfolder(sdk_code_path)
350+
except Exception as e:
351+
_LOGGER.warning(
352+
f"Fail to check api version in subfolder for {package_name} in {readme_or_tsp}: {str(e)}"
353+
)
354+
355+
# Changelog generation
356+
try:
357+
last_version, last_stable_release = get_version_info(package_name, result[package_name]["tagIsStable"])
358+
change_log_func = partial(
359+
change_log_generate,
360+
package_name,
361+
last_version,
362+
result[package_name]["tagIsStable"],
363+
last_stable_release=last_stable_release,
364+
prefolder=folder_name,
365+
is_multiapi=result[package_name]["isMultiapi"],
366+
)
367+
368+
changelog_generation_start_time = time.time()
369+
try:
370+
md_output = execute_func_with_timeout(change_log_func)
371+
except multiprocessing.TimeoutError:
372+
md_output = "change log generation was timeout!!! You need to write it manually!!!"
373+
except:
374+
md_output = "change log generation failed!!! You need to write it manually!!!"
375+
finally:
376+
for file in ["stable.json", "current.json"]:
377+
file_path = Path(sdk_folder, folder_name, package_name, file)
378+
if file_path.exists():
379+
os.remove(file_path)
380+
_LOGGER.info(f"Remove {file_path} which is temp file to generate changelog.")
381+
382+
_LOGGER.info(
383+
f"changelog generation cost time: {int(time.time() - changelog_generation_start_time)} seconds"
384+
)
385+
result[package_name]["changelog"] = {
386+
"content": md_output,
387+
"hasBreakingChange": "Breaking Changes" in md_output,
388+
"breakingChangeItems": extract_breaking_change(md_output),
389+
}
390+
result[package_name]["version"] = last_version
391+
392+
_LOGGER.info(f"[PACKAGE]({package_name})[CHANGELOG]:{md_output}")
393+
except Exception as e:
394+
_LOGGER.warning(f"Fail to generate changelog for {package_name} in {readme_or_tsp}: {str(e)}")
395+
396+
# Generate ApiView
397+
if run_in_pipeline:
398+
apiview_start_time = time.time()
399+
try:
400+
package_path = Path(sdk_folder, folder_name, package_name)
401+
check_call(
402+
[
403+
"python",
404+
"-m",
405+
"pip",
406+
"install",
407+
"-r",
408+
"../../../eng/apiview_reqs.txt",
409+
"--index-url=https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi"
410+
"/simple/",
411+
],
412+
cwd=package_path,
413+
timeout=600,
414+
)
415+
check_call(["apistubgen", "--pkg-path", "."], cwd=package_path, timeout=600)
416+
for file in os.listdir(package_path):
417+
if "_python.json" in file and package_name in file:
418+
result[package_name]["apiViewArtifact"] = str(Path(package_path, file))
419+
except Exception as e:
420+
_LOGGER.debug(f"Fail to generate ApiView token file for {package_name}: {e}")
421+
_LOGGER.info(f"apiview generation cost time: {int(time.time() - apiview_start_time)} seconds")
422+
else:
423+
_LOGGER.info("Skip ApiView generation for package that does not run in pipeline.")
325424

326-
# check whether multiapi package has only one api-version in per subfolder
327-
check_api_version_in_subfolder(sdk_code_path)
425+
# check generated files and update package["version"]
426+
if package_name.startswith("azure-mgmt-"):
427+
try:
428+
check_file(result[package_name])
429+
except Exception as e:
430+
_LOGGER.warning(f"Fail to check generated files for {package_name}: {e}")
328431

329-
# could be removed in the short future
330-
result[package_name]["afterMultiapiCombiner"] = False
432+
# Build artifacts for package
433+
try:
434+
create_package(result[package_name]["path"][0], package_name)
435+
dist_path = Path(sdk_folder, folder_name, package_name, "dist")
436+
result[package_name]["artifacts"] = [
437+
str(dist_path / package_file) for package_file in os.listdir(dist_path)
438+
]
439+
for artifact in result[package_name]["artifacts"]:
440+
if ".whl" in artifact:
441+
result[package_name]["language"] = "Python"
442+
break
443+
_LOGGER.info(f"Built package {package_name} successfully.")
331444
except Exception as e:
332-
_LOGGER.error(f"fail to setup package: {str(e)}")
445+
_LOGGER.warning(f"Fail to build package {package_name} in {readme_or_tsp}: {str(e)}")
446+
447+
# update result
448+
result[package_name]["installInstructions"] = {
449+
"full": "You can use pip to install the artifacts.",
450+
"lite": f"pip install {package_name}",
451+
}
452+
result[package_name]["result"] = "succeeded"
453+
result[package_name]["packageFolder"] = result[package_name]["path"][0]
333454

334455
# remove duplicates
335456
try:
@@ -340,13 +461,23 @@ def main(generate_input, generate_output):
340461
if value.get("readmeMd"):
341462
value["readmeMd"] = list(set(value["readmeMd"]))
342463
except Exception as e:
343-
_LOGGER.error(f"fail to remove duplicates: {str(e)}")
464+
_LOGGER.warning(f"Fail to remove duplicates: {str(e)}")
344465

345466
if len(result) == 0 and len(readme_and_tsp) > 1:
346467
raise Exception("No package is generated, please check the log for details")
347468

469+
if len(result) == 0:
470+
_LOGGER.info("No packages to process, returning empty result")
471+
else:
472+
_LOGGER.info(f"Processing {len(result)} generated packages...")
473+
474+
final_result = {"packages": list(result.values())}
348475
with open(generate_output, "w") as writer:
349-
json.dump(result, writer)
476+
json.dump(final_result, writer, indent=2)
477+
478+
_LOGGER.info(
479+
f"Congratulations! Succeed to build package for {[p['packageName'] for p in final_result['packages']]}. And you shall be able to see the generated code when running 'git status'."
480+
)
350481

351482

352483
def generate_main():

0 commit comments

Comments (0)