diff --git a/conda/conda-recipes/azure-mgmt/meta.yaml b/conda/conda-recipes/azure-mgmt/meta.yaml index 9735b91ed0fe..a91087d648d9 100644 --- a/conda/conda-recipes/azure-mgmt/meta.yaml +++ b/conda/conda-recipes/azure-mgmt/meta.yaml @@ -75,7 +75,7 @@ test: - azure.mgmt.applicationinsights.v2022_06_15.aio.operations - azure.mgmt.applicationinsights.v2022_06_15.models - azure.mgmt.applicationinsights.v2022_06_15.operations - - azure-mgmt-arizeaiobservabilityeval + - azure.mgmt.arizeaiobservabilityeval - azure.mgmt.arizeaiobservabilityeval.aio - azure.mgmt.arizeaiobservabilityeval.aio.operations - azure.mgmt.arizeaiobservabilityeval.models @@ -152,7 +152,7 @@ test: - azure.mgmt.botservice.aio.operations - azure.mgmt.botservice.models - azure.mgmt.botservice.operations - - azure-mgmt-carbonoptimization + - azure.mgmt.carbonoptimization - azure.mgmt.carbonoptimization.aio - azure.mgmt.carbonoptimization.aio.operations - azure.mgmt.carbonoptimization.models @@ -421,7 +421,7 @@ test: - azure.mgmt.hanaonazure.aio.operations - azure.mgmt.hanaonazure.models - azure.mgmt.hanaonazure.operations - - azure-mgmt-hardwaresecuritymodules + - azure.mgmt.hardwaresecuritymodules - azure.mgmt.hardwaresecuritymodules.aio - azure.mgmt.hardwaresecuritymodules.aio.operations - azure.mgmt.hardwaresecuritymodules.models @@ -517,7 +517,7 @@ test: - azure.mgmt.labservices.aio.operations - azure.mgmt.labservices.models - azure.mgmt.labservices.operations - - azure-mgmt-lambdatesthyperexecute + - azure.mgmt.lambdatesthyperexecute - azure.mgmt.lambdatesthyperexecute.aio - azure.mgmt.lambdatesthyperexecute.aio.operations - azure.mgmt.lambdatesthyperexecute.models @@ -612,7 +612,7 @@ test: - azure.mgmt.mongocluster.aio.operations - azure.mgmt.mongocluster.models - azure.mgmt.mongocluster.operations - - azure-mgmt-mongodbatlas + - azure.mgmt.mongodbatlas - azure.mgmt.mongodbatlas.aio - azure.mgmt.mongodbatlas.aio.operations - azure.mgmt.mongodbatlas.models @@ -732,7 +732,7 @@ test: - 
azure.mgmt.privatedns.aio.operations - azure.mgmt.privatedns.models - azure.mgmt.privatedns.operations - - azure-mgmt-purestorageblock + - azure.mgmt.purestorageblock - azure.mgmt.purestorageblock.aio - azure.mgmt.purestorageblock.aio.operations - azure.mgmt.purestorageblock.models @@ -793,7 +793,7 @@ test: - azure.mgmt.recoveryservicesbackup.passivestamp.aio.operations - azure.mgmt.recoveryservicesbackup.passivestamp.models - azure.mgmt.recoveryservicesbackup.passivestamp.operations - - azure-mgmt-recoveryservicesdatareplication + - azure.mgmt.recoveryservicesdatareplication - azure.mgmt.recoveryservicesdatareplication.aio - azure.mgmt.recoveryservicesdatareplication.aio.operations - azure.mgmt.recoveryservicesdatareplication.models @@ -974,7 +974,7 @@ test: - azure.mgmt.storage.aio.operations - azure.mgmt.storage.models - azure.mgmt.storage.operations - - azure-mgmt-storageactions + - azure.mgmt.storageactions - azure.mgmt.storageactions.aio - azure.mgmt.storageactions.aio.operations - azure.mgmt.storageactions.models diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py new file mode 100644 index 000000000000..2e5a2be74640 --- /dev/null +++ b/conda/conda_helper_functions.py @@ -0,0 +1,253 @@ +""" +Helper functions for updating conda files. 
"""
Helper functions for updating conda files.
"""

import os
import glob
from typing import Dict, List, Optional, Tuple
import csv
import json
from ci_tools.logging import logger
import urllib.request
from datetime import datetime
from ci_tools.parsing import ParsedSetup

ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
SDK_DIR = os.path.join(ROOT_DIR, "sdk")

AZURE_SDK_CSV_URL = "https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv"
PACKAGE_COL = "Package"
LATEST_GA_DATE_COL = "LatestGADate"
VERSION_GA_COL = "VersionGA"
FIRST_GA_DATE_COL = "FirstGADate"
DISPLAY_NAME_COL = "DisplayName"
SERVICE_NAME_COL = "ServiceName"
REPO_PATH_COL = "RepoPath"
TYPE_COL = "Type"
SUPPORT_COL = "Support"

# =====================================
# Helpers for handling bundled releases
# =====================================


def get_package_path(package_name: str) -> Optional[str]:
    """Get the filesystem path of an SDK package given its name.

    :param package_name: The package directory name, e.g. "azure-storage-blob".
    :return: The first matching path under sdk/, or None if no match exists.
    """
    pattern = os.path.join(SDK_DIR, "**", package_name)
    matches = glob.glob(pattern, recursive=True)
    if not matches:
        logger.error(f"Package path not found for package: {package_name}")
        return None
    # glob order is filesystem-dependent; assumes package dir names are unique
    # under sdk/ -- TODO confirm duplicates cannot occur.
    return matches[0]


def get_bundle_name(package_name: str) -> Optional[str]:
    """
    Check bundled release config from package's pyproject.toml file.

    If bundled, return the bundle name; otherwise, return None.

    :param package_name: The package name to look up.
    :return: The configured ``bundle_name``, or None when the package is
        released individually (or its path/setup cannot be resolved).
    """
    package_path = get_package_path(package_name)
    if not package_path:
        logger.warning(f"Cannot determine package path for {package_name}")
        return None
    parsed = ParsedSetup.from_path(package_path)
    if not parsed:
        # TODO raise something
        logger.error(f"Failed to parse setup for package {package_name}")
        return None

    conda_config = parsed.get_conda_config()

    if not conda_config:
        if parsed.is_stable_release():
            # TODO raise something
            logger.warning(f"Stable release package {package_name} needs a conda config")
        return None

    if conda_config and "bundle_name" in conda_config:
        return conda_config["bundle_name"]

    return None


def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]:
    """Create a mapping of bundle names to their constituent package names.

    :param package_names: Package names to inspect for bundle configuration.
    :return: Mapping of bundle name -> list of member package names.
    """
    logger.info("Mapping bundle names to packages...")
    all_paths = glob.glob(os.path.join(SDK_DIR, "*", "*"))
    # Exclude temp directories like .tox, .venv, __pycache__, etc.
    path_lookup = {
        os.path.basename(p): p
        for p in all_paths
        if os.path.isdir(p) and not os.path.basename(p).startswith((".", "__"))
    }

    bundle_map = {}
    for package_name in package_names:
        logger.debug(f"Processing package for bundle mapping: {package_name}")
        package_path = path_lookup.get(package_name)
        if not package_path:
            logger.warning(f"Package path not found for {package_name}")
            continue

        # Skip directories without pyproject.toml
        if not os.path.exists(os.path.join(package_path, "pyproject.toml")):
            logger.warning(f"Skipping {package_name}: no pyproject.toml found")
            continue

        parsed = ParsedSetup.from_path(package_path)
        if not parsed:
            logger.error(f"Failed to parse setup for package {package_name}")
            continue

        conda_config = parsed.get_conda_config()
        if conda_config and "bundle_name" in conda_config:
            bundle_name = conda_config["bundle_name"]
            logger.debug(f"Bundle name for package {package_name}: {bundle_name}")
            bundle_map.setdefault(bundle_name, []).append(package_name)

    return bundle_map


# =====================================
# Utility functions for parsing data
# =====================================


def parse_csv() -> List[Dict[str, str]]:
    """Download and parse the Azure SDK Python packages CSV file.

    :return: One dict per CSV row; empty list on download or parse failure.
    """
    try:
        logger.info(f"Downloading CSV from {AZURE_SDK_CSV_URL}")

        # Fix: timeout added for consistency with get_package_data_from_pypi,
        # so a hung connection cannot stall the release tooling indefinitely.
        with urllib.request.urlopen(AZURE_SDK_CSV_URL, timeout=10) as response:
            csv_content = response.read().decode("utf-8")

        # Parse the CSV content
        csv_reader = csv.DictReader(csv_content.splitlines())
        packages = list(csv_reader)

        logger.info(f"Successfully parsed {len(packages)} packages from CSV")

        return packages

    except Exception as e:
        logger.error(f"Failed to download or parse CSV: {e}")
        return []


def is_mgmt_package(pkg: Dict[str, str]) -> bool:
    """Return True when a CSV row describes a management-plane package.

    Uses the Type column when present; otherwise falls back to a name
    heuristic ("mgmt"/"cognitiveservices" in the name, excluding
    azure-mgmt-core which ships with the core bundle).
    """
    pkg_name = pkg.get(PACKAGE_COL, "")
    _type = pkg.get(TYPE_COL, "")
    if _type == "mgmt":
        return True
    elif _type == "client":
        return False
    else:
        return pkg_name != "azure-mgmt-core" and (
            "mgmt" in pkg_name or "cognitiveservices" in pkg_name
        )


def separate_packages_by_type(
    packages: List[Dict[str, str]],
) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
    """Separate packages into data plane and management plane libraries.

    :param packages: Parsed CSV rows.
    :return: (data_plane_rows, mgmt_plane_rows).
    """
    data_plane_packages = []
    mgmt_plane_packages = []

    for pkg in packages:
        if is_mgmt_package(pkg):
            mgmt_plane_packages.append(pkg)
        else:
            data_plane_packages.append(pkg)

    logger.debug(
        f"Separated {len(data_plane_packages)} data plane and {len(mgmt_plane_packages)} management plane packages"
    )

    return (data_plane_packages, mgmt_plane_packages)


def package_needs_update(
    package_row: Dict[str, str], prev_release_date: str, is_new: bool = False
) -> bool:
    """
    Check if the package is new or needs version update (i.e., FirstGADate or LatestGADate is after the last release).

    :param package_row: The parsed CSV row for the package.
    :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format.
    :param is_new: Whether to check for new package (FirstGADate) or outdated package (LatestGADate).
    :return: if the package is new or needs an update.
    """
    compare_date = (
        package_row.get(FIRST_GA_DATE_COL)
        if is_new
        else package_row.get(LATEST_GA_DATE_COL)
    )

    logger.debug(
        f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} against date: {compare_date}"
    )

    if not compare_date:
        if not is_new and package_row.get(PACKAGE_COL) == "uamqp":
            return True  # uamqp is an exception

        logger.debug(
            f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}."
        )

        # TODO need to verify that this is the desired behavior / we're not skipping needed packages

        return False

    try:
        # Convert string dates to datetime objects for proper comparison
        compare_date = datetime.strptime(compare_date, "%m/%d/%Y")
        prev_date = datetime.strptime(prev_release_date, "%m/%d/%Y")
        logger.debug(
            f"Comparing {package_row.get(PACKAGE_COL)} CompareDate {compare_date} with previous release date {prev_date}"
        )
        return compare_date > prev_date
    except ValueError as e:
        logger.error(
            f"Date parsing error for package {package_row.get(PACKAGE_COL)}: {e}"
        )
        return False


def get_package_data_from_pypi(
    package_name: str,
) -> Tuple[Optional[str], Optional[str]]:
    """Fetch the latest version and download URI for a package from PyPI.

    :param package_name: The PyPI project name.
    :return: (version, sdist_url) on success; (None, None) when the package,
        its release files, or an sdist cannot be found, or on network error.
    """
    pypi_url = f"https://pypi.org/pypi/{package_name}/json"
    try:
        with urllib.request.urlopen(pypi_url, timeout=10) as response:
            data = json.loads(response.read().decode("utf-8"))

        # Get the latest version
        latest_version = data["info"]["version"]
        files = data.get("releases", {}).get(latest_version) or []
        # Get the source distribution (sdist) if available
        source_dist = next((f for f in files if f["packagetype"] == "sdist"), None)
        if source_dist:
            download_url = source_dist["url"]
            logger.info(
                f"Found download URL for {package_name}=={latest_version}: {download_url}"
            )
            return latest_version, download_url
        # Fix: the original could fall through here without an explicit return,
        # yielding a bare None that breaks tuple-unpacking at the call site.
        logger.warning(f"No sdist found on PyPI for {package_name}=={latest_version}")
    except Exception as e:
        logger.error(f"Failed to fetch download URI from PyPI for {package_name}: {e}")
    return None, None


def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]:
    """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml."""
    package_index = {}

    for artifact_idx, artifact in enumerate(conda_artifacts):
        if "checkout" in artifact:
            for checkout_idx, checkout_item in enumerate(artifact["checkout"]):
                package_name = checkout_item.get("package")
                if package_name:
                    package_index[package_name] = (artifact_idx, checkout_idx)
    return package_index
"""
Mapping of Azure SDK package names to their release group, used for Conda
release file updates in update_conda_files.py

New grouped packages should be registered before using the script
to update for Conda releases.

Packages that are not listed here are treated as standalone packages,
each forming their own release group (excluding mgmt packages, which will
by default be grouped).

Packages that are grouped together will:
    1. Share a single release log file (e.g., azure-communication.md for all communication packages)
    2. Be listed under one CondaArtifact entry in conda-sdk-client.yml
    3. Be released together under a single release parameter
"""

# Keys are release-group names; each value carries the member "packages",
# the "common_root" used when laying out the combined source tree, and
# (where it differs from the derived value) the "service" name.
RELEASE_GROUPS = {
    # Core
    "azure-core": {
        "packages": ["azure-core", "azure-mgmt-core"],
        "common_root": "azure",
        "service": "core",
    },
    # Communication
    "azure-communication": {
        "packages": [
            "azure-communication-chat",
            "azure-communication-email",
            "azure-communication-identity",
            "azure-communication-phonenumbers",
            "azure-communication-sms",
            "azure-communication-callautomation",
            "azure-communication-rooms",
            "azure-communication-jobrouter",
            "azure-communication-messages",
        ],
        "common_root": "azure/communication",
        "service": "communication",
    },
    # Storage
    "azure-storage": {
        "packages": [
            "azure-storage-blob",
            "azure-storage-queue",
            "azure-storage-file-share",
            "azure-storage-file-datalake",
        ],
        "common_root": "azure/storage",
        "service": "storage",
    },
    # Schema Registry
    "azure-schemaregistry": {
        "packages": [
            "azure-schemaregistry",
            "azure-schemaregistry-avroencoder",
        ],
        "common_root": "azure/schemaregistry",
        "service": "schemaregistry",
    },
    # Event Hub
    "azure-eventhub": {
        "packages": [
            "azure-eventhub",
            "azure-eventhub-checkpointstoreblob",
            "azure-eventhub-checkpointstoreblob-aio",
        ],
        "common_root": "azure/eventhub",
        "service": "eventhub",
    },
    "azure-keyvault": {
        "packages": [
            "azure-keyvault-administration",
            "azure-keyvault-secrets",
            "azure-keyvault-keys",
            "azure-keyvault-certificates",
        ],
        "common_root": "azure/keyvault",
        "service": "keyvault",
    },
    # Packages with other pattern exceptions, e.g. different common root
    # or service vs package name mismatch
    "msrest": {"packages": ["msrest"], "common_root": None},
    "msal": {"packages": ["msal"], "common_root": None},
    "msal-extensions": {
        "packages": ["msal-extensions"],
        "common_root": "msal",
    },
    "azure-ai-vision": {
        "packages": ["azure-ai-vision-imageanalysis"],
        "common_root": "azure/vision",
    },
    "azure-healthinsights": {
        "packages": ["azure-healthinsights-radiologyinsights"],
        "common_root": "azure",
        "service": "healthinsights",
    },
}


# Reverse mapping: package name -> release group name
def get_package_to_group_mapping() -> dict:
    """Build the reverse mapping of package name -> release group name."""
    mapping = {}
    for group_name, group_info in RELEASE_GROUPS.items():
        for package in group_info["packages"]:
            mapping[package] = group_name
    return mapping


def get_release_group(package_name: str, package_to_group: dict) -> str:
    """
    Get the release group name for a given package.

    :param package_name: The package name (e.g., "azure-core", "azure-communication-chat")
    :param package_to_group: Reverse mapping built by get_package_to_group_mapping().
    :return: The release group name (e.g., "azure-core", "azure-communication"), or package name itself if not grouped
    """
    return package_to_group.get(package_name, package_name)


def get_package_group_data(group_name: str) -> dict:
    """
    Get all packages that belong to a release group.

    :param group_name: The release group name
    :return: The group data dictionary, or empty dict if not found
    """
    return RELEASE_GROUPS.get(group_name, {})


# --- file boundary: conda/update_conda_files.py ---

"""Update package versions, yml files, release-logs, and changelogs for conda packages."""

import os
import argparse
import yaml
import re
import glob
from datetime import datetime
from dateutil.relativedelta import relativedelta
from ci_tools.logging import logger, configure_logging
from ci_tools.parsing import ParsedSetup, extract_package_metadata
from typing import Dict, List, Optional, Tuple
from conda_helper_functions import (
    parse_csv,
    separate_packages_by_type,
    package_needs_update,
    get_package_data_from_pypi,
    build_package_index,
    get_package_path,
    get_bundle_name,
    map_bundle_to_packages,
    PACKAGE_COL,
    VERSION_GA_COL,
    LATEST_GA_DATE_COL,
    REPO_PATH_COL,
    SUPPORT_COL,
)

from conda_release_groups import (
    get_package_group_data,
    get_release_group,
    get_package_to_group_mapping,
)

# paths
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
SDK_DIR = os.path.join(ROOT_DIR, "sdk")
CONDA_DIR = os.path.join(ROOT_DIR, "conda")
CONDA_RECIPES_DIR = os.path.join(CONDA_DIR, "conda-recipes")
CONDA_RELEASE_LOGS_DIR = os.path.join(CONDA_DIR, "conda-releaselogs")
CONDA_ENV_PATH = os.path.join(CONDA_RECIPES_DIR, "conda_env.yml")
CONDA_CLIENT_YAML_PATH = os.path.join(
    ROOT_DIR, "eng", "pipelines", "templates", "stages", "conda-sdk-client.yml"
)
CONDA_MGMT_META_YAML_PATH = os.path.join(CONDA_RECIPES_DIR, "azure-mgmt", "meta.yaml")

# constants
RELEASE_PERIOD_MONTHS = 3

# packages that should be shipped but are known to be missing from the csv
PACKAGES_WITH_DOWNLOAD_URI = [
    "msal",
    "msal-extensions",
]
# =====================================
# Helpers for updating conda_env.yml
# =====================================


class quoted(str):
    """Marker string type: values wrapped in `quoted` are emitted with single quotes by PyYAML."""
    pass


def quoted_presenter(dumper, data):
    """YAML presenter to force quotes around a string."""
    return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="'")


def update_conda_version() -> Tuple[datetime, str]:
    """Bump AZURESDK_CONDA_VERSION in conda_env.yml by one release period.

    :return: (previous version as a datetime, new version as a "YYYY.MM.DD" string).
    """
    with open(CONDA_ENV_PATH, "r") as file:
        conda_env_data = yaml.safe_load(file)

    old_version = conda_env_data["variables"]["AZURESDK_CONDA_VERSION"]
    old_date = datetime.strptime(old_version, "%Y.%m.%d")

    new_date = old_date + relativedelta(months=RELEASE_PERIOD_MONTHS)

    # bump version
    new_version = new_date.strftime("%Y.%m.%d")
    conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] = quoted(new_version)

    yaml.add_representer(quoted, quoted_presenter)

    with open(CONDA_ENV_PATH, "w") as file:
        yaml.dump(conda_env_data, file, default_flow_style=False, sort_keys=False)

    logger.info(f"Updated AZURESDK_CONDA_VERSION from {old_version} to {new_version}")

    return old_date, new_version


# =====================================
# Helpers for updating conda-sdk-client.yml
# =====================================


class IndentDumper(yaml.SafeDumper):
    """Used to preserve indentation levels in conda-sdk-client.yml."""

    def increase_indent(self, flow=False, indentless=False):
        # Force indented (non-indentless) sequences so list items keep
        # their nesting level on round-trip.
        return super().increase_indent(flow, False)


def update_conda_sdk_client_yml(
    package_dict: Dict[str, Dict[str, str]],
    packages_to_update: List[str],
    new_data_plane_packages: List[str],
    new_mgmt_plane_packages: List[str],
) -> List[str]:
    """
    Update outdated package versions and add new entries in conda-sdk-client.yml file

    :param package_dict: Dictionary mapping package names to their CSV row data.
    :param packages_to_update: List of package names that need version updates.
    :param new_data_plane_packages: List of new data plane package names.
    :param new_mgmt_plane_packages: List of new management plane package names.
    :return: List of package names that were not updated or added and may require manual action.
    """
    updated_count = 0
    added_count = 0
    result = []

    with open(CONDA_CLIENT_YAML_PATH, "r") as file:
        conda_client_data = yaml.safe_load(file)

    conda_artifacts = conda_client_data["extends"]["parameters"]["stages"][0]["jobs"][
        0
    ]["steps"][0]["parameters"]["CondaArtifacts"]

    # === Update outdated package versions ===

    logger.info(
        f"Detected {len(packages_to_update)} outdated package versions to update in conda-sdk-client.yml"
    )
    package_index = build_package_index(conda_artifacts)

    for pkg_name in packages_to_update:
        pkg = package_dict.get(pkg_name, {})
        new_version = pkg.get(VERSION_GA_COL)
        if pkg_name in package_index:
            artifact_idx, checkout_idx = package_index[pkg_name]
            checkout_item = conda_artifacts[artifact_idx]["checkout"][checkout_idx]

            if "version" in checkout_item:
                old_version = checkout_item.get("version", "")
                checkout_item["version"] = new_version
                logger.info(f"Updated {pkg_name}: {old_version} -> {new_version}")
                updated_count += 1
            else:
                logger.warning(
                    f"Package {pkg_name} has no 'version' field, skipping update"
                )
                result.append(pkg_name)
        else:
            logger.warning(
                f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update"
            )
            result.append(pkg_name)

    # handle download_uri for packages known to be missing from the csv
    for pkg_name in PACKAGES_WITH_DOWNLOAD_URI:
        if pkg_name in package_index:
            artifact_idx, checkout_idx = package_index[pkg_name]
            checkout_item = conda_artifacts[artifact_idx]["checkout"][checkout_idx]

            curr_download_uri = checkout_item.get("download_uri", "")
            latest_version, download_uri = get_package_data_from_pypi(pkg_name)

            if not latest_version or not download_uri:
                logger.warning(
                    f"Could not retrieve latest version or download URI for {pkg_name} from PyPI, skipping"
                )
                result.append(pkg_name)
                continue

            if curr_download_uri != download_uri:
                # version needs update
                logger.info(
                    f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}"
                )
                checkout_item["version"] = latest_version
                checkout_item["download_uri"] = download_uri
                logger.info(
                    f"Updated download_uri for {pkg_name} with version {latest_version}: {download_uri}"
                )
                updated_count += 1
        else:
            logger.warning(
                f"Package {pkg_name} not found in conda-sdk-client.yml, skipping download_uri update"
            )
            result.append(pkg_name)

    # === Add new data plane packages ===

    logger.info(
        f"Detected {len(new_data_plane_packages)} new data plane packages to add to conda-sdk-client.yml"
    )

    parameters = conda_client_data["parameters"]

    # quick look up for handling bundled package releases
    existing_parameter_names = [p.get("name") for p in parameters]
    existing_artifact_names = {
        a.get("name"): idx for idx, a in enumerate(conda_artifacts)
    }

    for package_name in new_data_plane_packages:
        pkg = package_dict.get(package_name, {})

        if package_name in package_index:
            logger.warning(
                f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition"
            )
            result.append(package_name)
            continue

        # bundle info is based on pyproject.toml
        bundle_name = get_bundle_name(package_name)

        if bundle_name:
            # package is part of a bundle
            logger.info(
                f"Package {package_name} belongs to release bundle {bundle_name}"
            )
            release_name = f"release_{bundle_name.replace('-', '_')}"
            display_name = bundle_name
        else:
            # package is released individually
            release_name = f"release_{package_name.replace('-', '_')}"
            display_name = package_name

        # add new release parameter if not exists
        if release_name not in existing_parameter_names:
            logger.info(f"Adding new release parameter: {release_name}")
            new_parameter = {
                "name": release_name,
                "displayName": display_name,
                "type": "boolean",
                "default": True,
            }
            parameters.append(new_parameter)
            existing_parameter_names.append(release_name)

        # add to CondaArtifacts
        curr_version = pkg.get(VERSION_GA_COL)

        if not curr_version:
            logger.error(
                f"Package {package_name} is missing version info, skipping addition"
            )
            result.append(package_name)
            continue

        checkout_package = {"package": package_name, "version": curr_version}
        common_root, service_name = determine_service_info(pkg, bundle_name)

        if package_name in existing_artifact_names:
            # individual released package already exists
            logger.warning(
                f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition"
            )
            result.append(package_name)
            continue

        if bundle_name and bundle_name in existing_artifact_names:
            # bundle already exists, will append packages to it
            logger.info(
                f"Release bundle {bundle_name} already exists in conda-sdk-client.yml, will append package {package_name} to it"
            )
            conda_artifacts[existing_artifact_names[bundle_name]]["checkout"].append(
                checkout_package
            )
        else:
            # no existing artifact, whether bundle or not -> create
            new_artifact_entry = {
                "name": bundle_name if bundle_name else package_name,
                "common_root": common_root,
                "service": service_name,
                "in_batch": f"${{{{ parameters.{release_name} }}}}",
                "checkout": [checkout_package],
            }
            # append before azure-mgmt entry
            conda_artifacts.insert(len(conda_artifacts) - 1, new_artifact_entry)
            # Fix: record the index only when a new artifact was actually
            # inserted.  The original set this unconditionally, clobbering a
            # bundle's correct index after a plain append to an existing entry.
            existing_artifact_names[bundle_name if bundle_name else package_name] = (
                len(conda_artifacts) - 2
            )  # new index (inserted just before the trailing azure-mgmt entry)

        added_count += 1
        logger.info(f"Added new data plane package: {package_name}")

    # === Add new mgmt plane packages ===

    logger.info(
        f"Detected {len(new_mgmt_plane_packages)} new management plane packages to add to conda-sdk-client.yml"
    )

    # assumes azure-mgmt will always be the last CondaArtifacts entry
    azure_mgmt_artifact_checkout = conda_artifacts[-1]["checkout"]

    for package_name in new_mgmt_plane_packages:
        pkg = package_dict.get(package_name, {})

        if package_name in package_index:
            logger.warning(
                f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition"
            )
            result.append(package_name)
            continue

        new_mgmt_entry = {
            "package": package_name,
            "version": pkg.get(VERSION_GA_COL),
        }

        azure_mgmt_artifact_checkout.append(new_mgmt_entry)

        added_count += 1
        logger.info(f"Added new management plane package: {package_name}")

    # sort mgmt packages alphabetically
    azure_mgmt_artifact_checkout.sort(key=lambda x: x["package"])

    # TODO note this dump doesn't preserve some quotes like around
    # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary?
    # double check that this is ok, esp for URLs... ^

    if updated_count > 0 or added_count > 0:
        with open(CONDA_CLIENT_YAML_PATH, "w") as file:
            yaml.dump(
                conda_client_data,
                file,
                Dumper=IndentDumper,
                default_flow_style=False,
                sort_keys=False,
                indent=2,
                width=float("inf"),
            )
        logger.info(
            f"Successfully updated {updated_count} package versions in conda-sdk-client.yml"
        )
    else:
        logger.warning("No packages were found in the YAML file to update")
    return result


# =====================================
# Helpers for creating conda-recipes//meta.yaml files
# =====================================


def determine_service_info(
    pkg: Dict[str, str], bundle_name: Optional[str]
) -> Tuple[str, str]:
    """
    Returns the common root and service name for the given package.

    :param pkg: The parsed CSV row for the package.
    :param bundle_name: The name of the bundle/release group the package belongs to, if any.
    :return: (common_root, service_name).
    """
    # defaults
    package_name = pkg.get(PACKAGE_COL, "")
    service_name = pkg.get(REPO_PATH_COL, "").lower()

    if bundle_name:
        # Fix: guard against hyphen-less bundle names (e.g. "msrest"), which
        # would raise IndexError on split('-')[1].
        parts = bundle_name.split("-")
        common_root = f"azure/{parts[1]}" if len(parts) > 1 else "azure"
    else:
        common_root = "azure"

    # TODO handle exceptions msrest,msal.msal-extensions,azure-ai-vision,azure-healthinsights
    package_path = get_package_path(package_name)
    if not service_name and package_path:
        service_name = os.path.basename(os.path.dirname(package_path))

    return common_root, service_name


def format_requirement(req: str) -> str:
    """Format a requirement string for conda meta.yaml.

    azure-* requirements are pinned to the current conda release version via
    the AZURESDK_CONDA_VERSION jinja expression; everything else is passed
    through unchanged.
    """
    # NOTE(review): this function was garbled in the reviewed patch text and
    # has been reconstructed from the surviving fragments -- confirm against
    # the original before merging.
    name_unpinned = re.split(r"[><=~!]", req)[0].strip()
    if name_unpinned.startswith("azure-"):
        return f"{name_unpinned} >={{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}"
    return req


def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]]:
    """Retrieve the host and run requirements for a data plane package meta.yaml.

    :param parsed: Parsed setup/pyproject data for the package.
    :return: (host_requirements, run_requirements) as lists of formatted strings.
    """
    host_requirements = {"pip"}
    run_requirements = set()

    # TODO finalize actual list of essentials, this is more of a placeholder with reqs idk how to find dynamically
    for essential_req in [
        "azure-identity",
        "azure-core",
        "python",
        "aiohttp",
        "requests-oauthlib >=0.5.0",
        "cryptography",
    ]:
        req_name = format_requirement(essential_req)
        host_requirements.add(req_name)
        run_requirements.add(req_name)

    package_path = get_package_path(parsed.name)
    if not package_path:
        logger.error(f"Could not find package path for {parsed.name}")
        return list(host_requirements), list(run_requirements)

    # get requirements from setup.py or pyproject.toml
    install_reqs = parsed.requires

    for req in install_reqs:
        req_name = format_requirement(req)
        host_requirements.add(req_name)
        run_requirements.add(req_name)

    # TODO there are other requirements to consider...

    return list(host_requirements), list(run_requirements)


def get_package_metadata(package_name: str, package_path: Optional[str]) -> Tuple[str, str, str]:
    """Extract package metadata for about section in meta.yaml.

    :param package_name: The package name, used for fallbacks when no path/metadata exists.
    :param package_path: Filesystem path of the package, or None.
    :return: (home_url, summary, description).
    """
    # Fix: only call extract_package_metadata with a real path; the original
    # invoked it before the None check.
    pkg_metadata = extract_package_metadata(package_path) if package_path else None
    if package_path:
        service_dir = os.path.basename(os.path.dirname(package_path))
    else:
        # TODO
        service_dir = package_name.replace("azure-", "")
    home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}"

    # TODO check correctness of this
    if pkg_metadata and pkg_metadata.get("description"):
        summary = pkg_metadata["description"]
    else:
        summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python"

    # TODO definitely need to check if this is actually always correct
    conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}"
    description = (
        f"This is the {summary}.\n Please see {conda_url} for version details."
    )

    return home_url, summary, description


def generate_data_plane_meta_yaml(
    package_dict: Dict[str, Dict[str, str]],
    bundle_map: Dict[str, List[str]],
    package_name: str,
    bundle_name: Optional[str],
) -> str:
    """
    Generate the meta.yaml content for a data plane package or release group.

    :param package_dict: Mapping of package names to their CSV row data.
    :param bundle_map: Mapping of bundle names to member package names.
    :param package_name: The package being processed.
    :param bundle_name: The bundle the package belongs to, or None for standalone.
    :return: The rendered meta.yaml text.
    """
    # TODO is it correct that the env var name is arbitrary and replaced in conda_functions.py?
    src_distr_name = package_name.split("-")[-1].upper()
    src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION"

    # TODO not sure if this is the best way to get these requirements
    # TODO don't think this covers all possible import tests, e.g. azure.eventgrid, azure.eventgrid.aio <- when would I add that?
    if bundle_name:
        # handle grouped packages
        logger.info(
            f"Generating meta.yaml for release group {bundle_name} including packages: {bundle_map[bundle_name]}"
        )
        host_reqs = set()
        run_reqs = set()
        pkg_imports = []

        for pkg in bundle_map[bundle_name]:
            package_path = get_package_path(pkg)
            parsed_setup = ParsedSetup.from_path(package_path)

            pkg_host_reqs, pkg_run_reqs = get_package_requirements(parsed_setup)
            host_reqs.update(pkg_host_reqs)
            run_reqs.update(pkg_run_reqs)

            pkg_imports.append(pkg.replace("-", "."))
        host_reqs = list(host_reqs)
        run_reqs = list(run_reqs)

        package_path = get_package_path(bundle_map[bundle_name][0])
        home_url, summary, description = get_package_metadata(
            bundle_name, package_path
        )
    else:
        logger.info(f"Generating meta.yaml for package {package_name}")
        package_path = get_package_path(package_name)
        parsed_setup = ParsedSetup.from_path(package_path)

        host_reqs, run_reqs = get_package_requirements(parsed_setup)
        pkg_imports = [package_name.replace("-", ".")]

        home_url, summary, description = get_package_metadata(
            package_name, package_path
        )

    # Format requirements with proper YAML indentation
    host_reqs_str = "\n    - ".join(host_reqs)
    run_reqs_str = "\n    - ".join(run_reqs)
    pkg_imports_str = "\n    - ".join(pkg_imports)
    # NOTE(review): the join separators and template indentation below were
    # reconstructed from a whitespace-mangled patch -- verify the emitted
    # meta.yaml indentation against an existing recipe.
    meta_yaml_content = f"""{{% set name = "{package_name}" %}}

package:
  name: "{{{{ name|lower }}}}"
  version: {{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}

source:
  url: {{{{ environ.get('{src_distribution_env_var}', '') }}}}

build:
  noarch: python
  number: 0
  script: "{{{{ PYTHON }}}} -m pip install . -vv"

requirements:
  host:
    - {host_reqs_str}
  run:
    - {run_reqs_str}

test:
  imports:
    - {pkg_imports_str}

about:
  home: "{home_url}"
  license: MIT
  license_family: MIT
  license_file:
  summary: "{summary}"
  description: |
    {description}
  doc_url:
  dev_url:

extra:
  recipe-maintainers:
    - xiangyan99
"""
    return meta_yaml_content


def add_new_data_plane_packages(
    package_dict: Dict[str, Dict[str, str]], bundle_map: Dict[str, List[str]], new_data_plane_names: List[str]
) -> List[str]:
    """Create meta.yaml files for new data plane packages and add import tests.

    :return: Package names that could not be processed and need manual action.
    """
    if len(new_data_plane_names) == 0:
        return []

    logger.info(f"Adding {len(new_data_plane_names)} new data plane packages")
    result = []

    # bundles are processed once when encountering the first package in that group
    bundles_processed = set()
    for package_name in new_data_plane_names:
        logger.info(f"Adding new data plane meta.yaml for: {package_name}")

        bundle_name = get_bundle_name(package_name)

        if bundle_name and bundle_name in bundles_processed:
            logger.info(
                f"Meta.yaml for bundle {bundle_name} already created, skipping {package_name}"
            )
            continue

        # Fix: create the recipe directory only after the bundle-skip check,
        # so skipped bundle members don't leave empty recipe directories.
        # NOTE(review): the recipe dir is named after the first package seen
        # for a bundle, not the bundle itself -- confirm this is intended.
        pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml")
        os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True)

        try:
            meta_yml = generate_data_plane_meta_yaml(
                package_dict, bundle_map, package_name, bundle_name
            )
            if bundle_name:
                bundles_processed.add(bundle_name)
        except Exception as e:
            logger.error(
                f"Failed to generate meta.yaml content for {package_name} and skipping, error: {e}"
            )
            result.append(package_name)
            continue

        try:
            with open(pkg_yaml_path, "w") as f:
                f.write(meta_yml)
            logger.info(f"Created meta.yaml for {package_name} at {pkg_yaml_path}")
        except Exception as e:
            logger.error(f"Failed to create meta.yaml for {package_name}: {e}")
            result.append(package_name)

    return result


# =====================================
Helpers for adding new mgmt plane packages to azure-mgmt/meta.yaml +# ===================================== + + +def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: + """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" + if len(new_packages) == 0: + return [] + logger.info(f"Adding {len(new_packages)} new management plane packages") + result = [] + + # can't use pyyaml due to jinja2 + with open(CONDA_MGMT_META_YAML_PATH, "r") as file: + content = file.read() + + test_match = re.search( + r"^test:\s*\n\s*imports:(.*?)^(?=\w)", content, re.MULTILINE | re.DOTALL + ) + if not test_match: + logger.error("Could not find 'test: imports:' section in meta.yaml") + result.extend([pkg.get(PACKAGE_COL) for pkg in new_packages]) + return result + + existing_imports_text = test_match.group(1) + existing_imports = [ + line.strip() + for line in existing_imports_text.strip().split("\n") + if line.strip().startswith("-") + ] + + new_imports = [] + for pkg in new_packages: + package_name = pkg.get(PACKAGE_COL) + if not package_name: + logger.warning("Skipping package with missing name") + continue + + # TODO there are some existing packages that have hyphens instead of . which seems wrong? 
+ # ^ should manually edit these before running this script coz it messes with alphabetical sort + + module_name = package_name.replace("-", ".") + + imports = [ + f"- {module_name}", + f"- {module_name}.aio", + f"- {module_name}.aio.operations", + f"- {module_name}.models", + f"- {module_name}.operations", + ] + + new_imports.extend(imports) + logger.info(f"Generated import statements for {package_name}") + + all_imports = list(set(existing_imports + new_imports)) + + # sort alphabetically + all_imports.sort() + + # format imports with proper indentation + formatted_imports = "\n".join(f" {imp}" for imp in all_imports) + + # replace the imports section + new_imports_section = f"test:\n imports:\n{formatted_imports}\n\n" + updated_content = re.sub( + r"^test:\s*\n\s*imports:.*?^(?=\w)", + new_imports_section, + content, + flags=re.MULTILINE | re.DOTALL, + ) + + try: + with open(CONDA_MGMT_META_YAML_PATH, "w") as file: + file.write(updated_content) + except Exception as e: + logger.error(f"Failed to update {CONDA_MGMT_META_YAML_PATH}: {e}") + result.extend([pkg.get(PACKAGE_COL) for pkg in new_packages]) + + logger.info(f"Added {len(new_packages)} new management plane packages to meta.yaml") + return result + + +# ===================================== +# Helpers for updating release logs +# ===================================== + + +def update_data_plane_release_logs( + package_dict: Dict, + new_data_plane_names: List[str], + release_date: str, +) -> List[str]: + """ + Add and update release logs for data plane conda packages. 
Release log includes versions of all packages for the release + """ + result = [] + package_to_group = get_package_to_group_mapping() + + # Update all existing data plane release logs by file + # NOTE: for new packages added to an existing release group, this should handle that as well + # as long as conda_release_groups.py was updated to include the new package in the group + + existing_release_logs = glob.glob(os.path.join(CONDA_RELEASE_LOGS_DIR, "*.md")) + for release_log_path in existing_release_logs: + curr_service_name = os.path.basename(release_log_path).replace(".md", "") + # skip azure-mgmt here + if curr_service_name == "azure-mgmt": + continue + if ( + curr_service_name not in package_dict + and curr_service_name not in package_to_group.values() + ): + logger.warning( + f"Existing release log service {curr_service_name} was not found in CSV data, skipping update. It may be deprecated." + ) + result.append(curr_service_name) + continue + + group_name = get_release_group(curr_service_name, package_to_group) + group_data = get_package_group_data(group_name) + + pkg_updates = [] + if group_data: + pkg_names_in_log = group_data["packages"] + for pkg_name in pkg_names_in_log: + pkg = package_dict.get(pkg_name, {}) + version = pkg.get(VERSION_GA_COL) + if version: + pkg_updates.append(f"- {pkg_name}-{version}") + else: + logger.warning( + f"Package {pkg_name} in group {group_name} is missing version info, it may be deprecated. Skipping in release log update" + ) + result.append(pkg_name) + else: + pkg = package_dict.get(curr_service_name, {}) + version = pkg.get(VERSION_GA_COL) + if version: + pkg_updates.append(f"- {curr_service_name}-{version}") + else: + logger.warning( + f"Package {curr_service_name} is missing version info, it may be deprecated. 
Skipping in release log update" + ) + result.append(curr_service_name) + try: + with open(release_log_path, "r") as f: + existing_content = f.read() + + lines = existing_content.split("\n") + + new_release = f"\n## {release_date}\n\n" + + # check if release is already logged + if new_release in existing_content: + logger.info( + f"Release log for {curr_service_name} already contains entry for {release_date}, overwriting" + ) + # remove existing release section to overwrite + release_idx = lines.index(new_release.strip()) + + ## find next release heading or end of file + next_release_idx = next( + ( + i + for i in range(release_idx + 1, len(lines)) + if lines[i].startswith("## ") + ), + len(lines), + ) + del lines[release_idx:next_release_idx] + + new_release += "### Packages included\n\n" + new_release += "\n".join(pkg_updates) + lines.insert(1, new_release) + + updated_content = "\n".join(lines) + + with open(release_log_path, "w") as f: + f.write(updated_content) + + logger.info(f"Updated release log for {os.path.basename(release_log_path)}") + except Exception as e: + logger.error( + f"Failed to update release log {os.path.basename(release_log_path)}: {e}" + ) + result.append(curr_service_name) + + # Handle brand new packages + for package_name in new_data_plane_names: + pkg = package_dict.get(package_name, {}) + version = pkg.get(VERSION_GA_COL) + + if not version: + logger.warning(f"Skipping {package_name} with missing version") + result.append(package_name) + continue + + # check for group + group_name = get_release_group(package_name, get_package_to_group_mapping()) + group_data = get_package_group_data(group_name) + if group_data: + release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{group_name}.md") + else: + release_log_path = os.path.join( + CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" + ) + + if not os.path.exists(release_log_path): + # Add brand new release log file + logger.info(f"Creating new release log for: {group_name}") + + title_parts = 
group_name.replace("azure-", "").split("-") + title = " ".join(word.title() for word in title_parts) + + content = f"# Azure {title} client library for Python (conda)\n\n" + content += f"## {release_date}\n\n" + content += "### Packages included\n\n" + + pkg_updates = [] + if group_data: + pkg_names_in_log = group_data["packages"] + for pkg_name in pkg_names_in_log: + pkg = package_dict.get(pkg_name, {}) + version = pkg.get(VERSION_GA_COL) + pkg_updates.append(f"- {pkg_name}-{version}") + else: + pkg = package_dict.get(package_name, {}) + version = pkg.get(VERSION_GA_COL) + pkg_updates.append(f"- {package_name}-{version}") + content += "\n".join(pkg_updates) + + try: + with open(release_log_path, "w") as f: + f.write(content) + logger.info(f"Created new release log for {group_name}") + except Exception as e: + logger.error(f"Failed to create release log for {group_name}: {e}") + result.append(group_name) + + else: + logger.info( + f"Release log for {group_name} already exists, check that new package {package_name} is included" + ) + + return result + + +def update_mgmt_plane_release_log( + package_dict: Dict, + all_mgmt_plane_names: List[str], + release_date: str, +) -> List[str]: + """ + Update azure-mgmt release log. 
+ """ + result = [] + + mgmt_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") + if not os.path.exists(mgmt_log_path): + logger.error("Management plane release log azure-mgmt.md does not exist.") + return all_mgmt_plane_names # all new packages need attention + + pkg_updates = [] + for package_name in all_mgmt_plane_names: + pkg = package_dict.get(package_name, {}) + version = pkg.get(VERSION_GA_COL) + + if not version: + logger.warning( + f"Skipping release log update of {package_name} with missing version" + ) + result.append(package_name) + continue + + pkg_updates.append(f"- {package_name}-{version}") + + try: + with open(mgmt_log_path, "r") as f: + existing_content = f.read() + + lines = existing_content.split("\n") + + new_release = f"\n## {release_date}\n\n" + + # check if release is already logged + if new_release in existing_content: + logger.info( + f"Release log for azure-mgmt already contains entry for {release_date}, overwriting" + ) + # remove existing release section to overwrite + release_idx = lines.index(new_release.strip()) + + ## find next release heading or end of file + next_release_idx = next( + ( + i + for i in range(release_idx + 1, len(lines)) + if lines[i].startswith("## ") + ), + len(lines), + ) + del lines[release_idx:next_release_idx] + + new_release += "### Packages included\n\n" + + new_release += "\n".join(pkg_updates) + lines.insert(1, new_release) + updated_content = "\n".join(lines) + + with open(mgmt_log_path, "w") as f: + f.write(updated_content) + except Exception as e: + logger.error(f"Failed to update azure-mgmt release log: {e}") + return all_mgmt_plane_names + + return result + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Update conda package files and versions for release." 
+ ) + parser.add_argument( + "--verbose", + action="store_true", + help="Enable debug logging", + ) + + args = parser.parse_args() + configure_logging(args) + + old_date, new_version = update_conda_version() + + # convert to mm/dd/yyyy format for comparison with CSV dates + old_version = old_date.strftime("%m/%d/%Y") + + packages = parse_csv() + if not packages: + logger.error("No packages found in CSV data.") + exit(1) + + # TODO clean this part up + + # Only ship GA packages that are not deprecated + packages = [ + pkg + for pkg in packages + if ( + (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL)) + and not pkg.get(SUPPORT_COL) == "deprecated" + ) + ] + logger.info(f"Filtered to {len(packages)} GA packages") + + data_pkgs, mgmt_pkgs = separate_packages_by_type(packages) + outdated_data_pkgs = [ + pkg for pkg in data_pkgs if package_needs_update(pkg, old_version, is_new=False) + ] + new_data_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in data_pkgs + if package_needs_update(pkg, old_version, is_new=True) + ] + outdated_mgmt_pkgs = [ + pkg for pkg in mgmt_pkgs if package_needs_update(pkg, old_version, is_new=False) + ] + new_mgmt_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in mgmt_pkgs + if package_needs_update(pkg, old_version, is_new=True) + ] + + # map package name to csv row for easy lookup + package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} + + # Extract package names from the filtered lists + outdated_package_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in (outdated_data_pkgs + outdated_mgmt_pkgs) + if pkg.get(PACKAGE_COL) + ] + + # update conda-sdk-client.yml + # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
+ conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( + package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names + ) + + # pre-process bundled packages to minimize file writes for new data plane packages, + # and release logs + bundle_map = map_bundle_to_packages(list(package_dict.keys())) + # TODO testing + print(bundle_map) + + # handle new data plane libraries + new_data_plane_results = add_new_data_plane_packages( + package_dict, bundle_map, new_data_plane_names + ) + exit() + + # handle new mgmt plane libraries + new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) + + # add/update release logs + data_plane_release_log_results = update_data_plane_release_logs( + package_dict, new_data_plane_names, new_version + ) + + mgmt_plane_release_log_results = update_mgmt_plane_release_log( + package_dict, all_mgmt_packages, new_version + ) + + # TODO AKA link logic + + print("=== REPORT ===") + + if conda_sdk_client_pkgs_result: + print( + "The following packages may require manual adjustments in conda-sdk-client.yml:" + ) + for pkg_name in conda_sdk_client_pkgs_result: + print(f"- {pkg_name}") + + if new_data_plane_results: + print( + "\nThe following new data plane packages may require manual meta.yaml creation or adjustments:" + ) + for pkg_name in new_data_plane_results: + print(f"- {pkg_name}") + + if new_mgmt_plane_results: + print( + "\nThe following new management plane packages may require manual adjustments in azure-mgmt/meta.yaml:" + ) + for pkg_name in new_mgmt_plane_results: + print(f"- {pkg_name}") + + if data_plane_release_log_results: + print( + "\nThe following data plane packages may require manual adjustments in release logs:" + ) + for pkg_name in data_plane_release_log_results: + print(f"- {pkg_name}") diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 0c5bb8b5ebab..4602a781a16c 100644 --- 
a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -351,12 +351,24 @@ def from_path(cls, parse_directory_or_file: str): def get_build_config(self) -> Optional[Dict[str, Any]]: return get_build_config(self.folder) + def get_conda_config(self) -> Optional[Dict[str, Any]]: + return get_conda_config(self.folder) + def get_config_setting(self, setting: str, default: Any = True) -> Any: return get_config_setting(self.folder, setting, default) def is_reporting_suppressed(self, setting: str) -> bool: return compare_string_to_glob_array(setting, self.get_config_setting("suppressed_skip_warnings", [])) + def is_stable_release(self) -> bool: + """ + Check if this package is a stable release version. + + :rtype: bool + :return: True if this is a stable release, False if beta + """ + return classify_release_type(self.version) == "stable" + def __str__(self): lines = [f"ParsedSetup from {self.folder}"] for attr in [ @@ -453,6 +465,25 @@ def get_build_config(package_path: str) -> Optional[Dict[str, Any]]: except: return {} +def get_conda_config(package_path: str) -> Optional[Dict[str, Any]]: + """ + Attempts to retrieve all values within [tools.azure-sdk-conda] section of a pyproject.toml. + """ + if os.path.isfile(package_path): + package_path = os.path.dirname(package_path) + + toml_file = os.path.join(package_path, "pyproject.toml") + + if os.path.exists(toml_file): + try: + with open(toml_file, "rb") as f: + toml_dict = toml.load(f) + if "tool" in toml_dict: + tool_configs = toml_dict["tool"] + if "azure-sdk-conda" in tool_configs: + return tool_configs["azure-sdk-conda"] + except: + return {} def get_ci_config(package_path: str) -> Optional[Dict[str, Any]]: """ @@ -841,3 +872,16 @@ def compare_string_to_glob_array(string: str, glob_array: List[str]) -> bool: This function is used to easily compare a string to a set of glob strings, if it matches any of them, returns True. 
""" return any([fnmatch.fnmatch(string, glob) for glob in glob_array]) + + +def classify_release_type(version: str) -> str: + """ + Classify a package version as 'beta' or 'stable' based on version string patterns. + + :param str version: The version string to classify (e.g., "1.0.0", "2.1.0b1", "1.5.0a2") + :rtype: str + :return: Either "beta" or "stable" + """ + if "b" in version.lower(): + return "beta" + return "stable" diff --git a/sdk/communication/azure-communication-callautomation/pyproject.toml b/sdk/communication/azure-communication-callautomation/pyproject.toml index ab509fcf3611..467396480b4f 100644 --- a/sdk/communication/azure-communication-callautomation/pyproject.toml +++ b/sdk/communication/azure-communication-callautomation/pyproject.toml @@ -1,3 +1,7 @@ [tool.azure-sdk-build] pyright = false -verifytypes = false \ No newline at end of file +verifytypes = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" \ No newline at end of file diff --git a/sdk/communication/azure-communication-chat/pyproject.toml b/sdk/communication/azure-communication-chat/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-chat/pyproject.toml +++ b/sdk/communication/azure-communication-chat/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-email/pyproject.toml b/sdk/communication/azure-communication-email/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-email/pyproject.toml +++ b/sdk/communication/azure-communication-email/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-identity/pyproject.toml 
b/sdk/communication/azure-communication-identity/pyproject.toml index 9cfeff7ce263..fbc3cd0f05e1 100644 --- a/sdk/communication/azure-communication-identity/pyproject.toml +++ b/sdk/communication/azure-communication-identity/pyproject.toml @@ -1,5 +1,9 @@ [tool.azure-sdk-build] pyright = false +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" + [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" diff --git a/sdk/communication/azure-communication-jobrouter/pyproject.toml b/sdk/communication/azure-communication-jobrouter/pyproject.toml index 47f59e97f35e..9b4e3c02f286 100644 --- a/sdk/communication/azure-communication-jobrouter/pyproject.toml +++ b/sdk/communication/azure-communication-jobrouter/pyproject.toml @@ -1,3 +1,7 @@ [tool.azure-sdk-build] pyright = false sphinx = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-messages/pyproject.toml b/sdk/communication/azure-communication-messages/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-messages/pyproject.toml +++ b/sdk/communication/azure-communication-messages/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-phonenumbers/pyproject.toml b/sdk/communication/azure-communication-phonenumbers/pyproject.toml index f51cbbbb7407..adac01aee06d 100644 --- a/sdk/communication/azure-communication-phonenumbers/pyproject.toml +++ b/sdk/communication/azure-communication-phonenumbers/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] -pyright = false \ No newline at end of file +pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" \ No newline at end of file diff --git a/sdk/communication/azure-communication-rooms/pyproject.toml 
b/sdk/communication/azure-communication-rooms/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-rooms/pyproject.toml +++ b/sdk/communication/azure-communication-rooms/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-sms/pyproject.toml b/sdk/communication/azure-communication-sms/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-sms/pyproject.toml +++ b/sdk/communication/azure-communication-sms/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/core/azure-core/pyproject.toml b/sdk/core/azure-core/pyproject.toml index c847603044f5..22f7fa7364b7 100644 --- a/sdk/core/azure-core/pyproject.toml +++ b/sdk/core/azure-core/pyproject.toml @@ -20,3 +20,7 @@ name = "no_aiohttp" install = [] uninstall = ["aiohttp"] additional_pytest_args = ["-k", "_async.py"] + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-core" diff --git a/sdk/core/azure-mgmt-core/pyproject.toml b/sdk/core/azure-mgmt-core/pyproject.toml index 2f6cd1709824..5837349771e9 100644 --- a/sdk/core/azure-mgmt-core/pyproject.toml +++ b/sdk/core/azure-mgmt-core/pyproject.toml @@ -4,3 +4,7 @@ verifytypes = true pyright = false breaking = false black = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-core" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml index 03685d81d31a..6662340bf319 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml @@ -55,3 +55,7 @@ latestdependency = false mindependency = false whl_no_aio = false 
black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-eventhub" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml b/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml index 4ea03c30e600..bd54471fd812 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml @@ -50,4 +50,8 @@ pyright = false type_check_samples = true verifytypes = false pylint = true -black = false \ No newline at end of file +black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-eventhub" \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhub/pyproject.toml b/sdk/eventhub/azure-eventhub/pyproject.toml index 5437968d6e13..83e31b582699 100644 --- a/sdk/eventhub/azure-eventhub/pyproject.toml +++ b/sdk/eventhub/azure-eventhub/pyproject.toml @@ -46,3 +46,7 @@ pyright = false type_check_samples = true verifytypes = true pylint = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-eventhub" diff --git a/sdk/keyvault/azure-keyvault-administration/pyproject.toml b/sdk/keyvault/azure-keyvault-administration/pyproject.toml index e00361912969..f3c3c463cfe0 100644 --- a/sdk/keyvault/azure-keyvault-administration/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-administration/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" diff --git a/sdk/keyvault/azure-keyvault-certificates/pyproject.toml b/sdk/keyvault/azure-keyvault-certificates/pyproject.toml index e00361912969..f3c3c463cfe0 100644 --- a/sdk/keyvault/azure-keyvault-certificates/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-certificates/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" diff --git a/sdk/keyvault/azure-keyvault-keys/pyproject.toml 
b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index eec044c8763e..e7036b8d69fa 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -47,6 +47,10 @@ pytyped = ["py.typed"] [tool.azure-sdk-build] pyright = false +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" + [tool.uv.sources] azure-core = { path = "../../core/azure-core" } azure-keyvault-nspkg = { path = "../../nspkg/azure-keyvault-nspkg" } diff --git a/sdk/keyvault/azure-keyvault-secrets/pyproject.toml b/sdk/keyvault/azure-keyvault-secrets/pyproject.toml index 7272c6cb4690..1b1c5f5f5c9c 100644 --- a/sdk/keyvault/azure-keyvault-secrets/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-secrets/pyproject.toml @@ -1 +1,5 @@ [tool.azure-sdk-build] + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" diff --git a/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml b/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml index 78755ba24174..4b8523b0645a 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml +++ b/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml @@ -3,3 +3,7 @@ mypy = true pyright = false type_check_samples = true verifytypes = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-schemaregistry" diff --git a/sdk/schemaregistry/azure-schemaregistry/pyproject.toml b/sdk/schemaregistry/azure-schemaregistry/pyproject.toml index 49f351cfcef9..031caadada00 100644 --- a/sdk/schemaregistry/azure-schemaregistry/pyproject.toml +++ b/sdk/schemaregistry/azure-schemaregistry/pyproject.toml @@ -2,3 +2,7 @@ mypy = true pyright = false type_check_samples = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-schemaregistry" diff --git a/sdk/storage/azure-storage-blob/pyproject.toml b/sdk/storage/azure-storage-blob/pyproject.toml index 34719d6d26b0..2aff96cb2231 100644 --- 
a/sdk/storage/azure-storage-blob/pyproject.toml +++ b/sdk/storage/azure-storage-blob/pyproject.toml @@ -5,3 +5,7 @@ type_check_samples = true verifytypes = true strict_sphinx = true black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage" diff --git a/sdk/storage/azure-storage-file-datalake/pyproject.toml b/sdk/storage/azure-storage-file-datalake/pyproject.toml index fac4a5d5db0c..9b7b935bb394 100644 --- a/sdk/storage/azure-storage-file-datalake/pyproject.toml +++ b/sdk/storage/azure-storage-file-datalake/pyproject.toml @@ -4,3 +4,7 @@ pyright = false type_check_samples = true verifytypes = false black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage" diff --git a/sdk/storage/azure-storage-file-share/pyproject.toml b/sdk/storage/azure-storage-file-share/pyproject.toml index b04c8ccc0c0e..fd91dad74097 100644 --- a/sdk/storage/azure-storage-file-share/pyproject.toml +++ b/sdk/storage/azure-storage-file-share/pyproject.toml @@ -3,3 +3,7 @@ mypy = true pyright = false type_check_samples = true black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage" diff --git a/sdk/storage/azure-storage-queue/pyproject.toml b/sdk/storage/azure-storage-queue/pyproject.toml index 7ea997ba706c..6c32f6f3d4b7 100644 --- a/sdk/storage/azure-storage-queue/pyproject.toml +++ b/sdk/storage/azure-storage-queue/pyproject.toml @@ -4,3 +4,7 @@ pyright = false type_check_samples = true verifytypes = true black = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage"