From 4ba4e7b2415a150ec63002de32e7836871a5922d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 2 Dec 2025 13:52:12 -0800 Subject: [PATCH 001/112] update version --- conda/update_conda_files.py | 77 +++++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 conda/update_conda_files.py diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py new file mode 100644 index 000000000000..8c93858e4c2a --- /dev/null +++ b/conda/update_conda_files.py @@ -0,0 +1,77 @@ +"""Update package versions, yml files, release-logs, and changelogs for conda packages.""" + +import os +import argparse +import csv +import yaml +from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta +from ci_tools.logging import logger, configure_logging + +ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +CONDA_DIR = os.path.join(ROOT_DIR, "conda") + +# paths +CONDA_RECIPES_DIR = os.path.join(CONDA_DIR, "conda-recipes") +CONDA_RELEASE_LOGS_DIR = os.path.join(CONDA_DIR, "conda-releaselogs") +CONDA_ENV_PATH = os.path.join(CONDA_RECIPES_DIR, "conda_env.yml") + +# constants +RELEASE_PERIOD_MONTHS = 3 + +def update_conda_version() -> str: + """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the new version.""" + + with open(CONDA_ENV_PATH, 'r') as file: + conda_env_data = yaml.safe_load(file) + + current_version = conda_env_data['variables']['AZURESDK_CONDA_VERSION'] + current_date = datetime.strptime(current_version, '%Y.%m.%d') + + new_date = current_date + relativedelta(months=RELEASE_PERIOD_MONTHS) + + # bump version + new_version = new_date.strftime('%Y.%m.%d') + conda_env_data['variables']['AZURESDK_CONDA_VERSION'] = new_version + + with open(CONDA_ENV_PATH, 'w') as file: + yaml.dump(conda_env_data, file, default_flow_style=False, sort_keys=False) + + logger.info(f"Updated AZURESDK_CONDA_VERSION from {current_version} to {new_version}") + + 
return new_version + + +# read from csv +def parse_csv(): + pass + +# get new data plane libraries + +# get outdated versions + +# handle data yml + +# mgmt yml + +# import tests for data + +# import tests for mgmt + +# update conda-sdk-client + +# release logs + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Update conda package files and versions." + ) + + args = parser.parse_args() + + configure_logging(args) + + # Call the update function + update_conda_version() + + From a0212c64ddb6ba09b4a3128e058123a8c72c6107 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 2 Dec 2025 14:10:09 -0800 Subject: [PATCH 002/112] csv parsing --- conda/update_conda_files.py | 103 ++++++++++++++++++++++++++++-------- 1 file changed, 82 insertions(+), 21 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 8c93858e4c2a..855749acc34a 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -4,47 +4,103 @@ import argparse import csv import yaml +import urllib.request from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta -from ci_tools.logging import logger, configure_logging +from ci_tools.logging import logger, configure_logging +from typing import Dict, List, Optional, Tuple +# paths ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) CONDA_DIR = os.path.join(ROOT_DIR, "conda") - -# paths CONDA_RECIPES_DIR = os.path.join(CONDA_DIR, "conda-recipes") CONDA_RELEASE_LOGS_DIR = os.path.join(CONDA_DIR, "conda-releaselogs") CONDA_ENV_PATH = os.path.join(CONDA_RECIPES_DIR, "conda_env.yml") # constants RELEASE_PERIOD_MONTHS = 3 +AZURE_SDK_CSV_URL = "https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv" +PACKAGE_COL_NAME = "Package" + def update_conda_version() -> str: """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the new version.""" - - 
with open(CONDA_ENV_PATH, 'r') as file: + + with open(CONDA_ENV_PATH, "r") as file: conda_env_data = yaml.safe_load(file) - - current_version = conda_env_data['variables']['AZURESDK_CONDA_VERSION'] - current_date = datetime.strptime(current_version, '%Y.%m.%d') - + + current_version = conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] + current_date = datetime.strptime(current_version, "%Y.%m.%d") + new_date = current_date + relativedelta(months=RELEASE_PERIOD_MONTHS) - # bump version - new_version = new_date.strftime('%Y.%m.%d') - conda_env_data['variables']['AZURESDK_CONDA_VERSION'] = new_version - - with open(CONDA_ENV_PATH, 'w') as file: + # bump version + new_version = new_date.strftime("%Y.%m.%d") + conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] = new_version + + with open(CONDA_ENV_PATH, "w") as file: yaml.dump(conda_env_data, file, default_flow_style=False, sort_keys=False) - - logger.info(f"Updated AZURESDK_CONDA_VERSION from {current_version} to {new_version}") + + logger.info( + f"Updated AZURESDK_CONDA_VERSION from {current_version} to {new_version}" + ) return new_version # read from csv -def parse_csv(): - pass +def parse_csv() -> List[Dict[str, str]]: + """Download and parse the Azure SDK Python packages CSV file.""" + try: + logger.info(f"Downloading CSV from {AZURE_SDK_CSV_URL}") + + with urllib.request.urlopen(AZURE_SDK_CSV_URL) as response: + csv_content = response.read().decode("utf-8") + + # Parse the CSV content + csv_reader = csv.DictReader(csv_content.splitlines()) + packages = list(csv_reader) + + logger.info(f"Successfully parsed {len(packages)} packages from CSV") + + # Log some sample data for debugging + if packages: + logger.debug(f"Sample package data: {packages[0]}") + logger.debug(f"CSV headers: {list(packages[0].keys())}") + + return packages + + except Exception as e: + logger.error(f"Failed to download or parse CSV: {e}") + return [] + + +def is_mgmt_package(pkg_name: str) -> bool: + return pkg_name != "azure-mgmt-core" 
and ( + "mgmt" in pkg_name or "cognitiveservices" in pkg_name + ) + + +def separate_packages_by_type( + packages: List[Dict[str, str]], +) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + """Separate packages into data plane and management plane libraries.""" + data_plane_packages = [] + mgmt_plane_packages = [] + + for pkg in packages: + package_name = pkg.get(PACKAGE_COL_NAME, "") + if is_mgmt_package(package_name): + mgmt_plane_packages.append(pkg) + else: + data_plane_packages.append(pkg) + + logger.info( + f"Separated {len(data_plane_packages)} data plane and {len(mgmt_plane_packages)} management plane packages" + ) + + return (data_plane_packages, mgmt_plane_packages) + # get new data plane libraries @@ -68,10 +124,15 @@ def parse_csv(): ) args = parser.parse_args() - + configure_logging(args) - # Call the update function - update_conda_version() + new_version = update_conda_version() + # Parse CSV data + packages = parse_csv() + if not packages: + logger.error("No packages found in CSV data.") + exit(1) + data_plane_packages, mgmt_plane_packages = separate_packages_by_type(packages) From 344dc0ef6d5774b040012feb3dab6cd6a6609c3f Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 2 Dec 2025 15:05:10 -0800 Subject: [PATCH 003/112] progress on checking package versions --- conda/update_conda_files.py | 85 ++++++++++++++++++++++++++++--------- 1 file changed, 66 insertions(+), 19 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 855749acc34a..c80f12963700 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -20,19 +20,22 @@ # constants RELEASE_PERIOD_MONTHS = 3 AZURE_SDK_CSV_URL = "https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv" -PACKAGE_COL_NAME = "Package" +PACKAGE_COL = "Package" +LATEST_GA_DATE_COL = "LatestGADate" +VERSION_GA_COL = "VersionGA" +FIRST_GA_DATE_COL = "FirstGADate" -def 
update_conda_version() -> str: - """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the new version.""" +def update_conda_version() -> Tuple[str, str]: + """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions.""" with open(CONDA_ENV_PATH, "r") as file: conda_env_data = yaml.safe_load(file) - current_version = conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] - current_date = datetime.strptime(current_version, "%Y.%m.%d") + old_version = conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] + old_date = datetime.strptime(old_version, "%Y.%m.%d") - new_date = current_date + relativedelta(months=RELEASE_PERIOD_MONTHS) + new_date = old_date + relativedelta(months=RELEASE_PERIOD_MONTHS) # bump version new_version = new_date.strftime("%Y.%m.%d") @@ -41,11 +44,9 @@ def update_conda_version() -> str: with open(CONDA_ENV_PATH, "w") as file: yaml.dump(conda_env_data, file, default_flow_style=False, sort_keys=False) - logger.info( - f"Updated AZURESDK_CONDA_VERSION from {current_version} to {new_version}" - ) + logger.info(f"Updated AZURESDK_CONDA_VERSION from {old_version} to {new_version}") - return new_version + return old_version, new_version # read from csv @@ -63,11 +64,6 @@ def parse_csv() -> List[Dict[str, str]]: logger.info(f"Successfully parsed {len(packages)} packages from CSV") - # Log some sample data for debugging - if packages: - logger.debug(f"Sample package data: {packages[0]}") - logger.debug(f"CSV headers: {list(packages[0].keys())}") - return packages except Exception as e: @@ -89,7 +85,7 @@ def separate_packages_by_type( mgmt_plane_packages = [] for pkg in packages: - package_name = pkg.get(PACKAGE_COL_NAME, "") + package_name = pkg.get(PACKAGE_COL, "") if is_mgmt_package(package_name): mgmt_plane_packages.append(pkg) else: @@ -102,7 +98,37 @@ def separate_packages_by_type( return (data_plane_packages, mgmt_plane_packages) -# get new data plane libraries +def is_new_package(package_row: Dict[str, str], 
prev_release_date: str) -> bool: + """Check if the package is new (i.e., FirstGADate is after the last release).""" + firstGA = package_row.get(FIRST_GA_DATE_COL) + if not firstGA: + logger.error( + f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL}" + ) + return False + + try: + # Convert string dates to datetime objects for proper comparison + first_ga_date = datetime.strptime(firstGA, "%m/%d/%Y") + prev_date = datetime.strptime(prev_release_date, "%m/%d/%Y") + return first_ga_date > prev_date + except ValueError as e: + logger.error( + f"Date parsing error for package {package_row.get(PACKAGE_COL)}: {e}" + ) + return False + + +def update_package_versions( + packages: List[Dict[str, str]], prev_release_date: str +) -> List[Dict[str, str]]: + """Update outdated package versions and return new packages""" + new_packages = [] + # todo + + logger.info(f"Detected {len(new_packages)} new packages") + return new_packages + # get outdated versions @@ -120,19 +146,40 @@ def separate_packages_by_type( if __name__ == "__main__": parser = argparse.ArgumentParser( - description="Update conda package files and versions." + description="Update conda package files and versions for release." 
) args = parser.parse_args() configure_logging(args) - new_version = update_conda_version() + old_version, new_version = update_conda_version() + # convert to mm/dd/yyyy for comparison + old_date_obj = datetime.strptime(old_version, "%Y.%m.%d") + old_version = old_date_obj.strftime("%m/%d/%Y") # Parse CSV data packages = parse_csv() + if not packages: logger.error("No packages found in CSV data.") exit(1) + # Only ship GA packages + packages = [pkg for pkg in packages if pkg.get(VERSION_GA_COL)] + logger.info(f"Filtered to {len(packages)} GA packages") + data_plane_packages, mgmt_plane_packages = separate_packages_by_type(packages) + + for pkg in data_plane_packages: + logger.info( + f"Processing Data Plane Package: {pkg[PACKAGE_COL]} - Version GA: {pkg[VERSION_GA_COL]} , First GA: {pkg[FIRST_GA_DATE_COL]}, Latest GA: {pkg[LATEST_GA_DATE_COL]}" + ) + if is_new_package(pkg, old_version): + logger.info( + f"New Data Plane Package Detected: {pkg[PACKAGE_COL]} - First GA: {pkg[FIRST_GA_DATE_COL]}" + ) + else: + logger.info( + f"Existing Data Plane Package: {pkg[PACKAGE_COL]} - Latest GA: {pkg[LATEST_GA_DATE_COL]}" + ) From 8648d26fac5c8d2336374c690bc9efbf2f21280f Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 2 Dec 2025 15:34:55 -0800 Subject: [PATCH 004/112] update package version checking logic --- conda/update_conda_files.py | 67 +++++++++++++++++++++++-------------- 1 file changed, 42 insertions(+), 25 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index c80f12963700..4497c2ab102b 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -26,7 +26,9 @@ FIRST_GA_DATE_COL = "FirstGADate" -def update_conda_version() -> Tuple[str, str]: +def update_conda_version() -> ( + Tuple[datetime, str] +): # TODO do i need the new date anywhere else? 
i think i may """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions.""" with open(CONDA_ENV_PATH, "r") as file: @@ -46,7 +48,7 @@ def update_conda_version() -> Tuple[str, str]: logger.info(f"Updated AZURESDK_CONDA_VERSION from {old_version} to {new_version}") - return old_version, new_version + return old_date, new_version # read from csv @@ -98,20 +100,39 @@ def separate_packages_by_type( return (data_plane_packages, mgmt_plane_packages) -def is_new_package(package_row: Dict[str, str], prev_release_date: str) -> bool: - """Check if the package is new (i.e., FirstGADate is after the last release).""" - firstGA = package_row.get(FIRST_GA_DATE_COL) - if not firstGA: +def package_needs_update( + package_row: Dict[str, str], prev_release_date: str, is_new=False +) -> bool: + """ + Check if the package is new or needs version update (i.e., FirstGADate or LatestGADate is after the last release). + + :param package_row: The parsed CSV row for the package. + :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. + :param is_new: Whether to check for new package (FirstGADate) or outdated package (LatestGADate). + :return: if the package is new or needs an update. 
+ """ + compareDate = ( + package_row.get(FIRST_GA_DATE_COL) + if is_new + else package_row.get(LATEST_GA_DATE_COL) + ) + + if not compareDate: logger.error( - f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL}" + f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}" ) + + # TODO if date is missing, check PyPi for release date instead return False try: # Convert string dates to datetime objects for proper comparison - first_ga_date = datetime.strptime(firstGA, "%m/%d/%Y") + compare_date = datetime.strptime(compareDate, "%m/%d/%Y") prev_date = datetime.strptime(prev_release_date, "%m/%d/%Y") - return first_ga_date > prev_date + logger.debug( + f"Comparing {package_row.get(PACKAGE_COL)} CompareDate {compare_date} with previous release date {prev_date}" + ) + return compare_date > prev_date except ValueError as e: logger.error( f"Date parsing error for package {package_row.get(PACKAGE_COL)}: {e}" @@ -150,13 +171,13 @@ def update_package_versions( ) args = parser.parse_args() - + args.debug = True configure_logging(args) - old_version, new_version = update_conda_version() - # convert to mm/dd/yyyy for comparison - old_date_obj = datetime.strptime(old_version, "%Y.%m.%d") - old_version = old_date_obj.strftime("%m/%d/%Y") + old_date, new_version = update_conda_version() + + # convert to mm/dd/yyyy format for comparison with CSV dates + old_version = old_date.strftime("%m/%d/%Y") # Parse CSV data packages = parse_csv() @@ -171,15 +192,11 @@ def update_package_versions( data_plane_packages, mgmt_plane_packages = separate_packages_by_type(packages) + # TODO testing for pkg in data_plane_packages: - logger.info( - f"Processing Data Plane Package: {pkg[PACKAGE_COL]} - Version GA: {pkg[VERSION_GA_COL]} , First GA: {pkg[FIRST_GA_DATE_COL]}, Latest GA: {pkg[LATEST_GA_DATE_COL]}" - ) - if is_new_package(pkg, old_version): - logger.info( - f"New Data Plane Package Detected: {pkg[PACKAGE_COL]} - First GA: 
{pkg[FIRST_GA_DATE_COL]}" - ) - else: - logger.info( - f"Existing Data Plane Package: {pkg[PACKAGE_COL]} - Latest GA: {pkg[LATEST_GA_DATE_COL]}" - ) + if package_needs_update(pkg, old_version, is_new=True): + logger.info(f"New data plane package detected: {pkg.get(PACKAGE_COL)}") + print("---") + for pkg in data_plane_packages: + if package_needs_update(pkg, old_version, is_new=False): + logger.info(f"Outdated data plane package detected: {pkg.get(PACKAGE_COL)}") From a39c9ad053684a372b8c46b4b33b7231846dbdcc Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 3 Dec 2025 13:19:46 -0800 Subject: [PATCH 005/112] progress on updating vers and getting uri --- conda/update_conda_files.py | 171 +++++++++++++++++++++++++++++------- 1 file changed, 137 insertions(+), 34 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 4497c2ab102b..a3e9e43ad0a7 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -2,6 +2,7 @@ import os import argparse +import json import csv import yaml import urllib.request @@ -16,6 +17,7 @@ CONDA_RECIPES_DIR = os.path.join(CONDA_DIR, "conda-recipes") CONDA_RELEASE_LOGS_DIR = os.path.join(CONDA_DIR, "conda-releaselogs") CONDA_ENV_PATH = os.path.join(CONDA_RECIPES_DIR, "conda_env.yml") +CONDA_CLIENT_YAML_PATH = os.path.join(ROOT_DIR, "eng", "pipelines", "templates", "stages", "conda-sdk-client.yml") # constants RELEASE_PERIOD_MONTHS = 3 @@ -118,11 +120,12 @@ def package_needs_update( ) if not compareDate: - logger.error( - f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}" + logger.warning( + f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." 
) - # TODO if date is missing, check PyPi for release date instead + # TODO need to verify that this is the desired behavior / we're not skipping needed packages + return False try: @@ -139,31 +142,114 @@ def package_needs_update( ) return False +def get_package_data_from_pypi(package_name: str, version: str) -> Tuple[Optional[str], Optional[str]]: + """Fetch the latest version and download URI for a package from PyPI.""" + pypi_url = f"https://pypi.org/pypi/{package_name}/json" + try: + with urllib.request.urlopen(pypi_url, timeout=10) as response: + data = json.loads(response.read().decode("utf-8")) + + # Get the latest version + latest_version = data["info"]["version"] + + # Construct download URL from releases data + if version in data["releases"] and data["releases"][version]: + # Get the source distribution (sdist) if available, otherwise get the first file + files = data["releases"][version] + source_dist = next((f for f in files if f["packagetype"] == "sdist"), None) + if source_dist: + download_url = source_dist["url"] + return latest_version, download_url + + except Exception as e: + logger.error(f"Failed to fetch download URI from PyPI for {package_name}=={version}: {e}") + return None, None + +def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: + """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups.""" + package_index = {} + for artifact_idx, artifact in enumerate(conda_artifacts): + if 'checkout' in artifact: + for checkout_idx, checkout_item in enumerate(artifact['checkout']): + package_name = checkout_item.get('package') + if package_name: + package_index[package_name] = (artifact_idx, checkout_idx) + return package_index def update_package_versions( packages: List[Dict[str, str]], prev_release_date: str -) -> List[Dict[str, str]]: - """Update outdated package versions and return new packages""" - new_packages = [] - # todo - - logger.info(f"Detected {len(new_packages)} new packages") - 
return new_packages - - -# get outdated versions - -# handle data yml - -# mgmt yml - -# import tests for data - -# import tests for mgmt - -# update conda-sdk-client - -# release logs +) -> None: + """ + Update outdated package versions in the conda-sdk-client.yml file + :param packages: List of package rows from the CSV. + :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. + """ + packages_to_update = [] + + for package in packages: + if package_needs_update(package, prev_release_date, is_new=False): + packages_to_update.append((package.get(PACKAGE_COL), package.get(VERSION_GA_COL))) + + if not packages_to_update: + logger.info("No packages need version updates") + return + + logger.info(f"Detected {len(packages_to_update)} packages to update") + + with open(CONDA_CLIENT_YAML_PATH, "r") as file: + conda_client_data = yaml.safe_load(file) + + updated_count = 0 + + # Navigate to the CondaArtifacts section + conda_artifacts = conda_client_data['extends']['parameters']['stages'][0]['jobs'][0]['steps'][0]['parameters']['CondaArtifacts'] + package_index = build_package_index(conda_artifacts) + + for pkg_name, new_version in packages_to_update: + if pkg_name in package_index: + artifact_idx, checkout_idx = package_index[pkg_name] + checkout_item = conda_artifacts[artifact_idx]['checkout'][checkout_idx] + old_version = checkout_item.get('version', '') + checkout_item['version'] = new_version + logger.info(f"Updated {pkg_name}: {old_version} -> {new_version}") + updated_count += 1 + + # update download_uri + if 'download_uri' in checkout_item: + old_uri = checkout_item['download_uri'] + pypi_version, new_uri = get_package_data_from_pypi(pkg_name, new_version) + + if pypi_version != new_version: + logger.error(f"Version mismatch for {pkg_name}: got {new_version} from CSV, but {pypi_version} from PyPi") + + if new_uri: + checkout_item['download_uri'] = new_uri + logger.info(f"Updated download_uri for {pkg_name}: {old_uri} -> {new_uri}") + 
else: + logger.warning(f"Could not fetch new download_uri for {pkg_name}, keeping existing URI") + else: + logger.warning(f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update") + if updated_count > 0: + with open(CONDA_CLIENT_YAML_PATH, "w") as file: + yaml.dump(conda_client_data, file, default_flow_style=False, sort_keys=False, width=float('inf')) + logger.info(f"Successfully updated {updated_count} package versions in conda-sdk-client.yml") + else: + logger.warning("No packages were found in the YAML file to update") + + +def add_new_data_plane_packages( + new_packages: List[Dict[str, str]] +) -> None: + """Handle adding new data plane packages.""" + # TODO implement logic to add new data plane packages + logger.info(f"Adding {len(new_packages)} new data plane packages") + +def add_new_mgmt_plane_packages( + new_packages: List[Dict[str, str]] +) -> None: + """Handle adding new management plane packages.""" + # TODO implement logic to add new management plane packages + logger.info(f"Adding {len(new_packages)} new management plane packages") if __name__ == "__main__": parser = argparse.ArgumentParser( @@ -190,13 +276,30 @@ def update_package_versions( packages = [pkg for pkg in packages if pkg.get(VERSION_GA_COL)] logger.info(f"Filtered to {len(packages)} GA packages") - data_plane_packages, mgmt_plane_packages = separate_packages_by_type(packages) + # testing + # for pkg in packages: + # if not pkg.get(LATEST_GA_DATE_COL): + # logger.warning(f"Package {pkg.get(PACKAGE_COL)} missing {LATEST_GA_DATE_COL}") + # if not pkg.get(FIRST_GA_DATE_COL): + # logger.warning(f"Package {pkg.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL}") + + # update existing package versions + update_package_versions(packages, old_version) + + # handle new packages + new_packages = [ + pkg + for pkg in packages + if package_needs_update(pkg, old_version, is_new=True) + ] + new_data_plane_packages, new_mgmt_plane_packages = separate_packages_by_type(new_packages) + + # handle 
new data plane libraries + + # handle new mgmt plane libraries + + # update conda-sdk-client + + # add/update release logs + - # TODO testing - for pkg in data_plane_packages: - if package_needs_update(pkg, old_version, is_new=True): - logger.info(f"New data plane package detected: {pkg.get(PACKAGE_COL)}") - print("---") - for pkg in data_plane_packages: - if package_needs_update(pkg, old_version, is_new=False): - logger.info(f"Outdated data plane package detected: {pkg.get(PACKAGE_COL)}") From 4f7cac784182d271fbfd046d4382bc85317bb6f5 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 3 Dec 2025 14:40:56 -0800 Subject: [PATCH 006/112] some progress, but csv method misses packages --- conda/update_conda_files.py | 44 ++++++++++++++++++++----------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index a3e9e43ad0a7..86f5d544f7dc 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -120,10 +120,12 @@ def package_needs_update( ) if not compareDate: - logger.warning( + logger.debug( f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." 
) + logger.info(f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compareDate}") + # TODO need to verify that this is the desired behavior / we're not skipping needed packages return False @@ -151,14 +153,17 @@ def get_package_data_from_pypi(package_name: str, version: str) -> Tuple[Optiona # Get the latest version latest_version = data["info"]["version"] - + print("hello") # Construct download URL from releases data if version in data["releases"] and data["releases"][version]: + print("world") + print(data["releases"][version]) # Get the source distribution (sdist) if available, otherwise get the first file files = data["releases"][version] source_dist = next((f for f in files if f["packagetype"] == "sdist"), None) if source_dist: download_url = source_dist["url"] + logger.info(f"Found download URL for {package_name}=={version}: {download_url}") return latest_version, download_url except Exception as e: @@ -166,7 +171,7 @@ def get_package_data_from_pypi(package_name: str, version: str) -> Tuple[Optiona return None, None def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: - """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups.""" + """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml.""" package_index = {} for artifact_idx, artifact in enumerate(conda_artifacts): if 'checkout' in artifact: @@ -181,6 +186,7 @@ def update_package_versions( ) -> None: """ Update outdated package versions in the conda-sdk-client.yml file + :param packages: List of package rows from the CSV. :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. 
""" @@ -194,7 +200,7 @@ def update_package_versions( logger.info("No packages need version updates") return - logger.info(f"Detected {len(packages_to_update)} packages to update") + logger.info(f"Detected {len(packages_to_update)} outdated package versions to update") with open(CONDA_CLIENT_YAML_PATH, "r") as file: conda_client_data = yaml.safe_load(file) @@ -209,24 +215,30 @@ def update_package_versions( if pkg_name in package_index: artifact_idx, checkout_idx = package_index[pkg_name] checkout_item = conda_artifacts[artifact_idx]['checkout'][checkout_idx] - old_version = checkout_item.get('version', '') - checkout_item['version'] = new_version - logger.info(f"Updated {pkg_name}: {old_version} -> {new_version}") - updated_count += 1 - - # update download_uri - if 'download_uri' in checkout_item: + + # Handle packages with version field + if 'version' in checkout_item: + old_version = checkout_item.get('version', '') + checkout_item['version'] = new_version + logger.info(f"Updated {pkg_name}: {old_version} -> {new_version}") + updated_count += 1 + + # Handle packages with download_uri field + elif 'download_uri' in checkout_item: old_uri = checkout_item['download_uri'] pypi_version, new_uri = get_package_data_from_pypi(pkg_name, new_version) if pypi_version != new_version: - logger.error(f"Version mismatch for {pkg_name}: got {new_version} from CSV, but {pypi_version} from PyPi") + logger.error(f"Version mismatch for {pkg_name}: got {new_version} from CSV, but {pypi_version} from PyPI") if new_uri: checkout_item['download_uri'] = new_uri logger.info(f"Updated download_uri for {pkg_name}: {old_uri} -> {new_uri}") + updated_count += 1 else: logger.warning(f"Could not fetch new download_uri for {pkg_name}, keeping existing URI") + else: + logger.warning(f"Package {pkg_name} has neither 'version' nor 'download_uri' field, skipping update") else: logger.warning(f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update") if updated_count > 0: @@ -276,13 
+288,6 @@ def add_new_mgmt_plane_packages( packages = [pkg for pkg in packages if pkg.get(VERSION_GA_COL)] logger.info(f"Filtered to {len(packages)} GA packages") - # testing - # for pkg in packages: - # if not pkg.get(LATEST_GA_DATE_COL): - # logger.warning(f"Package {pkg.get(PACKAGE_COL)} missing {LATEST_GA_DATE_COL}") - # if not pkg.get(FIRST_GA_DATE_COL): - # logger.warning(f"Package {pkg.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL}") - # update existing package versions update_package_versions(packages, old_version) @@ -302,4 +307,3 @@ def add_new_mgmt_plane_packages( # add/update release logs - From 2841c1db56d1ffc588f7a020873671f3db2d67fe Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 3 Dec 2025 16:13:10 -0800 Subject: [PATCH 007/112] better handling download_uri --- conda/update_conda_files.py | 53 +++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 23 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 86f5d544f7dc..52f44b4e8720 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -27,6 +27,11 @@ VERSION_GA_COL = "VersionGA" FIRST_GA_DATE_COL = "FirstGADate" +# packages that should be shipped but are known to be missing from the csv +PACKAGES_WITH_DOWNLOAD_URI = [ + "msal", + "msal-extensions", +] def update_conda_version() -> ( Tuple[datetime, str] @@ -144,7 +149,7 @@ def package_needs_update( ) return False -def get_package_data_from_pypi(package_name: str, version: str) -> Tuple[Optional[str], Optional[str]]: +def get_package_data_from_pypi(package_name: str) -> Tuple[Optional[str], Optional[str]]: """Fetch the latest version and download URI for a package from PyPI.""" pypi_url = f"https://pypi.org/pypi/{package_name}/json" try: @@ -155,19 +160,19 @@ def get_package_data_from_pypi(package_name: str, version: str) -> Tuple[Optiona latest_version = data["info"]["version"] print("hello") # Construct download URL from 
releases data - if version in data["releases"] and data["releases"][version]: + if latest_version in data["releases"] and data["releases"][latest_version]: print("world") - print(data["releases"][version]) + print(data["releases"][latest_version]) # Get the source distribution (sdist) if available, otherwise get the first file - files = data["releases"][version] + files = data["releases"][latest_version] source_dist = next((f for f in files if f["packagetype"] == "sdist"), None) if source_dist: download_url = source_dist["url"] - logger.info(f"Found download URL for {package_name}=={version}: {download_url}") + logger.info(f"Found download URL for {package_name}=={latest_version}: {download_url}") return latest_version, download_url except Exception as e: - logger.error(f"Failed to fetch download URI from PyPI for {package_name}=={version}: {e}") + logger.error(f"Failed to fetch download URI from PyPI for {package_name}: {e}") return None, None def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: @@ -216,31 +221,33 @@ def update_package_versions( artifact_idx, checkout_idx = package_index[pkg_name] checkout_item = conda_artifacts[artifact_idx]['checkout'][checkout_idx] - # Handle packages with version field if 'version' in checkout_item: old_version = checkout_item.get('version', '') checkout_item['version'] = new_version logger.info(f"Updated {pkg_name}: {old_version} -> {new_version}") updated_count += 1 - - # Handle packages with download_uri field - elif 'download_uri' in checkout_item: - old_uri = checkout_item['download_uri'] - pypi_version, new_uri = get_package_data_from_pypi(pkg_name, new_version) - - if pypi_version != new_version: - logger.error(f"Version mismatch for {pkg_name}: got {new_version} from CSV, but {pypi_version} from PyPI") - - if new_uri: - checkout_item['download_uri'] = new_uri - logger.info(f"Updated download_uri for {pkg_name}: {old_uri} -> {new_uri}") - updated_count += 1 - else: - logger.warning(f"Could 
not fetch new download_uri for {pkg_name}, keeping existing URI") else: - logger.warning(f"Package {pkg_name} has neither 'version' nor 'download_uri' field, skipping update") + logger.warning(f"Package {pkg_name} has no 'version' field, skipping update") else: logger.warning(f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update") + + # handle download_uri for packages known to be missing from the csv + for pkg_name in PACKAGES_WITH_DOWNLOAD_URI: + if pkg_name in package_index: + artifact_idx, checkout_idx = package_index[pkg_name] + checkout_item = conda_artifacts[artifact_idx]['checkout'][checkout_idx] + + curr_download_uri = checkout_item.get('download_uri', '') + latest_version, download_uri = get_package_data_from_pypi(pkg_name) + + # TODO is this right + if curr_download_uri != download_uri: + # version needs update + logger.info(f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}") + checkout_item['download_uri'] = download_uri + logger.info(f"Updated download_uri for {pkg_name}: {download_uri}") + updated_count += 1 + if updated_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: yaml.dump(conda_client_data, file, default_flow_style=False, sort_keys=False, width=float('inf')) From e4d2aeff1a413af571b68afa798a46251c84266c Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 3 Dec 2025 16:21:01 -0800 Subject: [PATCH 008/112] minor cleanup log output --- conda/update_conda_files.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 52f44b4e8720..1f3f3a6e240f 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -124,13 +124,13 @@ def package_needs_update( else package_row.get(LATEST_GA_DATE_COL) ) + logger.debug(f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: 
{compareDate}") + if not compareDate: logger.debug( - f"Package {package_row.get(PACKAGE_COL)} missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." + f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." ) - logger.info(f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compareDate}") - # TODO need to verify that this is the desired behavior / we're not skipping needed packages return False @@ -158,11 +158,8 @@ def get_package_data_from_pypi(package_name: str) -> Tuple[Optional[str], Option # Get the latest version latest_version = data["info"]["version"] - print("hello") # Construct download URL from releases data if latest_version in data["releases"] and data["releases"][latest_version]: - print("world") - print(data["releases"][latest_version]) # Get the source distribution (sdist) if available, otherwise get the first file files = data["releases"][latest_version] source_dist = next((f for f in files if f["packagetype"] == "sdist"), None) @@ -240,7 +237,6 @@ def update_package_versions( curr_download_uri = checkout_item.get('download_uri', '') latest_version, download_uri = get_package_data_from_pypi(pkg_name) - # TODO is this right if curr_download_uri != download_uri: # version needs update logger.info(f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}") From be8b914059c16e1ff937d65e1059d70eb0b6a841 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 29 Dec 2025 12:24:31 -0800 Subject: [PATCH 009/112] minor progress and clean --- conda/update_conda_files.py | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 1f3f3a6e240f..0b1008bded7c 100644 --- a/conda/update_conda_files.py +++ 
b/conda/update_conda_files.py @@ -118,15 +118,15 @@ def package_needs_update( :param is_new: Whether to check for new package (FirstGADate) or outdated package (LatestGADate). :return: if the package is new or needs an update. """ - compareDate = ( + compare_date = ( package_row.get(FIRST_GA_DATE_COL) if is_new else package_row.get(LATEST_GA_DATE_COL) ) - logger.debug(f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compareDate}") + logger.debug(f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compare_date}") - if not compareDate: + if not compare_date: logger.debug( f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." ) @@ -137,7 +137,7 @@ def package_needs_update( try: # Convert string dates to datetime objects for proper comparison - compare_date = datetime.strptime(compareDate, "%m/%d/%Y") + compare_date = datetime.strptime(compare_date, "%m/%d/%Y") prev_date = datetime.strptime(prev_release_date, "%m/%d/%Y") logger.debug( f"Comparing {package_row.get(PACKAGE_COL)} CompareDate {compare_date} with previous release date {prev_date}" @@ -240,8 +240,9 @@ def update_package_versions( if curr_download_uri != download_uri: # version needs update logger.info(f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}") + checkout_item['version'] = latest_version checkout_item['download_uri'] = download_uri - logger.info(f"Updated download_uri for {pkg_name}: {download_uri}") + logger.info(f"Updated download_uri for {pkg_name} with version {latest_version}: {download_uri}") updated_count += 1 if updated_count > 0: @@ -255,14 +256,27 @@ def update_package_versions( def add_new_data_plane_packages( new_packages: List[Dict[str, str]] ) -> None: - """Handle adding new data plane packages.""" - # TODO implement logic 
to add new data plane packages + """Create meta.yaml files for new data plane packages and add import tests.""" logger.info(f"Adding {len(new_packages)} new data plane packages") + for pkg in new_packages: + package_name = pkg.get(PACKAGE_COL) + if not package_name: + logger.warning("Skipping package with missing name") + continue + + logger.info(f"Adding new data plane package: {package_name}") + + pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") + os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) + + with open(pkg_yaml_path, "w") as f: + + def add_new_mgmt_plane_packages( new_packages: List[Dict[str, str]] ) -> None: - """Handle adding new management plane packages.""" + """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" # TODO implement logic to add new management plane packages logger.info(f"Adding {len(new_packages)} new management plane packages") @@ -292,6 +306,7 @@ def add_new_mgmt_plane_packages( logger.info(f"Filtered to {len(packages)} GA packages") # update existing package versions + # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
update_package_versions(packages, old_version) # handle new packages @@ -303,10 +318,10 @@ def add_new_mgmt_plane_packages( new_data_plane_packages, new_mgmt_plane_packages = separate_packages_by_type(new_packages) # handle new data plane libraries + add_new_data_plane_packages(new_data_plane_packages) # handle new mgmt plane libraries # update conda-sdk-client # add/update release logs - From 6fd36d349e454f4b8881d5d8bcebc913a8f108be Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 29 Dec 2025 12:50:12 -0800 Subject: [PATCH 010/112] minor fix to quote string values in yaml --- conda/update_conda_files.py | 199 +++++++++++++++++++++++++++--------- 1 file changed, 153 insertions(+), 46 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 0b1008bded7c..49d4b2726644 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -17,7 +17,9 @@ CONDA_RECIPES_DIR = os.path.join(CONDA_DIR, "conda-recipes") CONDA_RELEASE_LOGS_DIR = os.path.join(CONDA_DIR, "conda-releaselogs") CONDA_ENV_PATH = os.path.join(CONDA_RECIPES_DIR, "conda_env.yml") -CONDA_CLIENT_YAML_PATH = os.path.join(ROOT_DIR, "eng", "pipelines", "templates", "stages", "conda-sdk-client.yml") +CONDA_CLIENT_YAML_PATH = os.path.join( + ROOT_DIR, "eng", "pipelines", "templates", "stages", "conda-sdk-client.yml" +) # constants RELEASE_PERIOD_MONTHS = 3 @@ -33,6 +35,15 @@ "msal-extensions", ] + +class quoted(str): + pass + + +def quoted_presenter(dumper, data): + return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="'") + + def update_conda_version() -> ( Tuple[datetime, str] ): # TODO do i need the new date anywhere else? 
i think i may @@ -48,7 +59,9 @@ def update_conda_version() -> ( # bump version new_version = new_date.strftime("%Y.%m.%d") - conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] = new_version + conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] = quoted(new_version) + + yaml.add_representer(quoted, quoted_presenter) with open(CONDA_ENV_PATH, "w") as file: yaml.dump(conda_env_data, file, default_flow_style=False, sort_keys=False) @@ -124,7 +137,9 @@ def package_needs_update( else package_row.get(LATEST_GA_DATE_COL) ) - logger.debug(f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compare_date}") + logger.debug( + f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compare_date}" + ) if not compare_date: logger.debug( @@ -149,40 +164,49 @@ def package_needs_update( ) return False -def get_package_data_from_pypi(package_name: str) -> Tuple[Optional[str], Optional[str]]: + +def get_package_data_from_pypi( + package_name: str, +) -> Tuple[Optional[str], Optional[str]]: """Fetch the latest version and download URI for a package from PyPI.""" pypi_url = f"https://pypi.org/pypi/{package_name}/json" try: with urllib.request.urlopen(pypi_url, timeout=10) as response: data = json.loads(response.read().decode("utf-8")) - + # Get the latest version latest_version = data["info"]["version"] # Construct download URL from releases data if latest_version in data["releases"] and data["releases"][latest_version]: # Get the source distribution (sdist) if available, otherwise get the first file files = data["releases"][latest_version] - source_dist = next((f for f in files if f["packagetype"] == "sdist"), None) + source_dist = next( + (f for f in files if f["packagetype"] == "sdist"), None + ) if source_dist: download_url = source_dist["url"] - logger.info(f"Found download URL for {package_name}=={latest_version}: {download_url}") + 
logger.info( + f"Found download URL for {package_name}=={latest_version}: {download_url}" + ) return latest_version, download_url - + except Exception as e: logger.error(f"Failed to fetch download URI from PyPI for {package_name}: {e}") return None, None + def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml.""" package_index = {} for artifact_idx, artifact in enumerate(conda_artifacts): - if 'checkout' in artifact: - for checkout_idx, checkout_item in enumerate(artifact['checkout']): - package_name = checkout_item.get('package') + if "checkout" in artifact: + for checkout_idx, checkout_item in enumerate(artifact["checkout"]): + package_name = checkout_item.get("package") if package_name: package_index[package_name] = (artifact_idx, checkout_idx) return package_index + def update_package_versions( packages: List[Dict[str, str]], prev_release_date: str ) -> None: @@ -193,16 +217,20 @@ def update_package_versions( :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. 
""" packages_to_update = [] - + for package in packages: if package_needs_update(package, prev_release_date, is_new=False): - packages_to_update.append((package.get(PACKAGE_COL), package.get(VERSION_GA_COL))) - + packages_to_update.append( + (package.get(PACKAGE_COL), package.get(VERSION_GA_COL)) + ) + if not packages_to_update: logger.info("No packages need version updates") return - - logger.info(f"Detected {len(packages_to_update)} outdated package versions to update") + + logger.info( + f"Detected {len(packages_to_update)} outdated package versions to update" + ) with open(CONDA_CLIENT_YAML_PATH, "r") as file: conda_client_data = yaml.safe_load(file) @@ -210,52 +238,132 @@ def update_package_versions( updated_count = 0 # Navigate to the CondaArtifacts section - conda_artifacts = conda_client_data['extends']['parameters']['stages'][0]['jobs'][0]['steps'][0]['parameters']['CondaArtifacts'] + conda_artifacts = conda_client_data["extends"]["parameters"]["stages"][0]["jobs"][ + 0 + ]["steps"][0]["parameters"]["CondaArtifacts"] package_index = build_package_index(conda_artifacts) for pkg_name, new_version in packages_to_update: if pkg_name in package_index: artifact_idx, checkout_idx = package_index[pkg_name] - checkout_item = conda_artifacts[artifact_idx]['checkout'][checkout_idx] - - if 'version' in checkout_item: - old_version = checkout_item.get('version', '') - checkout_item['version'] = new_version + checkout_item = conda_artifacts[artifact_idx]["checkout"][checkout_idx] + + if "version" in checkout_item: + old_version = checkout_item.get("version", "") + checkout_item["version"] = new_version logger.info(f"Updated {pkg_name}: {old_version} -> {new_version}") updated_count += 1 else: - logger.warning(f"Package {pkg_name} has no 'version' field, skipping update") + logger.warning( + f"Package {pkg_name} has no 'version' field, skipping update" + ) else: - logger.warning(f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update") + logger.warning( + 
f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update" + ) # handle download_uri for packages known to be missing from the csv for pkg_name in PACKAGES_WITH_DOWNLOAD_URI: if pkg_name in package_index: artifact_idx, checkout_idx = package_index[pkg_name] - checkout_item = conda_artifacts[artifact_idx]['checkout'][checkout_idx] + checkout_item = conda_artifacts[artifact_idx]["checkout"][checkout_idx] - curr_download_uri = checkout_item.get('download_uri', '') + curr_download_uri = checkout_item.get("download_uri", "") latest_version, download_uri = get_package_data_from_pypi(pkg_name) if curr_download_uri != download_uri: - # version needs update - logger.info(f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}") - checkout_item['version'] = latest_version - checkout_item['download_uri'] = download_uri - logger.info(f"Updated download_uri for {pkg_name} with version {latest_version}: {download_uri}") + # version needs update + logger.info( + f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}" + ) + checkout_item["version"] = latest_version + checkout_item["download_uri"] = download_uri + logger.info( + f"Updated download_uri for {pkg_name} with version {latest_version}: {download_uri}" + ) updated_count += 1 if updated_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: - yaml.dump(conda_client_data, file, default_flow_style=False, sort_keys=False, width=float('inf')) - logger.info(f"Successfully updated {updated_count} package versions in conda-sdk-client.yml") + yaml.dump( + conda_client_data, + file, + default_flow_style=False, + sort_keys=False, + width=float("inf"), + ) + logger.info( + f"Successfully updated {updated_count} package versions in conda-sdk-client.yml" + ) else: logger.warning("No packages were found in the YAML file to update") -def add_new_data_plane_packages( - new_packages: List[Dict[str, str]] -) -> None: +def 
generate_data_plane_meta_yaml( + package_name: str, version: str, download_uri: Optional[str] +) -> str: + """Generate the meta.yaml content for a data plane package.""" + + # TODO how to determine this? e.g. azure-ai-voicelive uses AGENTS_SOURCE_DISTRIBUTION + src_distribution_env_var = "AGENTS_SOURCE_DISTRIBUTION" # TODO placeholder + + meta_yaml_content = f""" + {{% set name = "{package_name}" %}} + package: + name: "{{ name|lower }}" + version: {{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} + + source: + url: {{ environ.get('{src_distribution_env_var}', '') }} + + build: + noarch: python + number: 0 + script: "{{ PYTHON }} -m pip install . -vv" + + requirements: + host: + - azure-core >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} + - azure-identity >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} + - cryptography + - pip + - python + - requests-oauthlib >=0.5.0 + - aiohttp + - isodate + run: + - azure-core >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} + - azure-identity >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} + - cryptography + - python + - requests-oauthlib >=0.5.0 + - aiohttp + - isodate + + test: + imports: + - azure.ai.agents + + about: + home: "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents" + license: MIT + license_family: MIT + license_file: + summary: "Microsoft Azure AI Agents Client Library for Python" + description: | + This is the Microsoft Azure AI Agents Client Library. + Please see https://aka.ms/azsdk/conda/releases/agents for version details. 
+ doc_url: + dev_url: + + extra: + recipe-maintainers: + - xiangyan99 + """ + return meta_yaml_content + + +def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: """Create meta.yaml files for new data plane packages and add import tests.""" logger.info(f"Adding {len(new_packages)} new data plane packages") @@ -269,17 +377,16 @@ def add_new_data_plane_packages( pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) - - with open(pkg_yaml_path, "w") as f: + # with open(pkg_yaml_path, "w") as f: -def add_new_mgmt_plane_packages( - new_packages: List[Dict[str, str]] -) -> None: + +def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" # TODO implement logic to add new management plane packages logger.info(f"Adding {len(new_packages)} new management plane packages") + if __name__ == "__main__": parser = argparse.ArgumentParser( description="Update conda package files and versions for release." @@ -306,16 +413,16 @@ def add_new_mgmt_plane_packages( logger.info(f"Filtered to {len(packages)} GA packages") # update existing package versions - # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... + # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
update_package_versions(packages, old_version) # handle new packages new_packages = [ - pkg - for pkg in packages - if package_needs_update(pkg, old_version, is_new=True) + pkg for pkg in packages if package_needs_update(pkg, old_version, is_new=True) ] - new_data_plane_packages, new_mgmt_plane_packages = separate_packages_by_type(new_packages) + new_data_plane_packages, new_mgmt_plane_packages = separate_packages_by_type( + new_packages + ) # handle new data plane libraries add_new_data_plane_packages(new_data_plane_packages) From ab12a5674562c1314c7f3c7b2aab9ef481143fb5 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 29 Dec 2025 17:20:35 -0800 Subject: [PATCH 011/112] minor progress in generating meta.yml --- conda/update_conda_files.py | 86 ++++++++++++++++++++++++++++++++++--- 1 file changed, 79 insertions(+), 7 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 49d4b2726644..cd3647e97a9f 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -6,6 +6,8 @@ import csv import yaml import urllib.request +import glob +import re from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta from ci_tools.logging import logger, configure_logging @@ -13,6 +15,7 @@ # paths ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +SDK_DIR = os.path.join(ROOT_DIR, "sdk") CONDA_DIR = os.path.join(ROOT_DIR, "conda") CONDA_RECIPES_DIR = os.path.join(CONDA_DIR, "conda-recipes") CONDA_RELEASE_LOGS_DIR = os.path.join(CONDA_DIR, "conda-releaselogs") @@ -41,6 +44,7 @@ class quoted(str): def quoted_presenter(dumper, data): + """YAML presenter to force quotes around a string.""" return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="'") @@ -176,9 +180,8 @@ def get_package_data_from_pypi( # Get the latest version latest_version = data["info"]["version"] - # Construct download URL from releases data if latest_version 
in data["releases"] and data["releases"][latest_version]: - # Get the source distribution (sdist) if available, otherwise get the first file + # Get the source distribution (sdist) if available files = data["releases"][latest_version] source_dist = next( (f for f in files if f["packagetype"] == "sdist"), None @@ -271,6 +274,12 @@ def update_package_versions( curr_download_uri = checkout_item.get("download_uri", "") latest_version, download_uri = get_package_data_from_pypi(pkg_name) + if not latest_version or not download_uri: + logger.warning( + f"Could not retrieve latest version or download URI for {pkg_name} from PyPI, skipping" + ) + continue + if curr_download_uri != download_uri: # version needs update logger.info( @@ -299,13 +308,68 @@ def update_package_versions( logger.warning("No packages were found in the YAML file to update") +def get_package_path(package_name: str) -> Optional[str]: + """Get filepath to a package from the package name.""" + pattern = os.path.join(SDK_DIR, "*", package_name) + matches = glob.glob(pattern) + if matches: + logger.debug(f"Found package path for {package_name}: {matches[0]}") + return matches[0] + + +def get_package_requirements(package_name: str) -> List[str]: + """Retrieve the install requirements for a package.""" + requirements = [] + try: + package_path = get_package_path(package_name) + if not package_path: + logger.error(f"Could not find package path for {package_name}") + return requirements + + config_path = os.path.join(package_path, "setup.py") + + if os.path.exists(config_path): + with open(config_path, "r") as f: + setup_contents = f.read() + + # A simple regex to find the install_requires list + match = re.search( + r"install_requires=\[(.*?)\]", setup_contents, re.DOTALL + ) + if match: + reqs_str = match.group(1) + # Split by commas and strip whitespace and quotes + requirements = [ + req.strip().strip('"').strip("'") for req in reqs_str.split(",") + ] + else: + config_path = os.path.join(package_path, 
"pyproject.toml") + if not os.path.exists(config_path): + logger.error(f"No setup.py or pyproject.toml found for {package_name}") + return requirements + # TODO? + + except Exception as e: + logger.error(f"Failed to read install requirements for {package_name}: {e}") + print(f"Requirements for {package_name}: {requirements}") + return requirements + + def generate_data_plane_meta_yaml( - package_name: str, version: str, download_uri: Optional[str] + package_name: str, download_uri: Optional[str] = None ) -> str: """Generate the meta.yaml content for a data plane package.""" - # TODO how to determine this? e.g. azure-ai-voicelive uses AGENTS_SOURCE_DISTRIBUTION - src_distribution_env_var = "AGENTS_SOURCE_DISTRIBUTION" # TODO placeholder + # TODO is it correct that the env var name is arbitrary and replaced in conda_functions.py? + src_distr_name = package_name.split("-")[-1].upper() + src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" + + pkg_name_normalized = package_name.replace("-", ".") + + # TODO how to get requirements ? 
+ reqs = get_package_requirements(package_name) + + # get about info meta_yaml_content = f""" {{% set name = "{package_name}" %}} @@ -342,7 +406,7 @@ def generate_data_plane_meta_yaml( test: imports: - - azure.ai.agents + - {pkg_name_normalized} about: home: "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents" @@ -360,6 +424,7 @@ def generate_data_plane_meta_yaml( recipe-maintainers: - xiangyan99 """ + print(meta_yaml_content) return meta_yaml_content @@ -378,7 +443,14 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) - # with open(pkg_yaml_path, "w") as f: + meta_yml = generate_data_plane_meta_yaml(package_name) + + try: + with open(pkg_yaml_path, "w") as f: + f.write(meta_yml) + logger.info(f"Created meta.yaml for {package_name} at {pkg_yaml_path}") + except Exception as e: + logger.error(f"Failed to create meta.yaml for {package_name}: {e}") def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: From 936d6feaabb4ddd121ce92226e37cf131410a2ed Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 29 Dec 2025 17:35:54 -0800 Subject: [PATCH 012/112] minor --- conda/update_conda_files.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index cd3647e97a9f..09a1f070aea4 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -497,9 +497,12 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: ) # handle new data plane libraries - add_new_data_plane_packages(new_data_plane_packages) + if len(new_data_plane_packages) > 0: + add_new_data_plane_packages(new_data_plane_packages) # handle new mgmt plane libraries + # if len(new_mgmt_plane_packages) > 0: + # 
add_new_mgmt_plane_packages(new_mgmt_plane_packages) # update conda-sdk-client From a6fefc45b79f973f32eadc76042f7d677c510d96 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 13:38:43 -0800 Subject: [PATCH 013/112] getting parsedsetup requirements..but this isn't everything --- conda/update_conda_files.py | 176 ++++++++++++++++-------------------- 1 file changed, 80 insertions(+), 96 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 09a1f070aea4..8275e82ded18 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -6,11 +6,12 @@ import csv import yaml import urllib.request -import glob import re +import glob from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta from ci_tools.logging import logger, configure_logging +from ci_tools.parsing import get_install_requires, ParsedSetup from typing import Dict, List, Optional, Tuple # paths @@ -75,7 +76,6 @@ def update_conda_version() -> ( return old_date, new_version -# read from csv def parse_csv() -> List[Dict[str, str]]: """Download and parse the Azure SDK Python packages CSV file.""" try: @@ -309,50 +309,43 @@ def update_package_versions( def get_package_path(package_name: str) -> Optional[str]: - """Get filepath to a package from the package name.""" - pattern = os.path.join(SDK_DIR, "*", package_name) - matches = glob.glob(pattern) + pattern = os.path.join(SDK_DIR, "**", package_name) + matches = glob.glob(pattern, recursive=True) if matches: - logger.debug(f"Found package path for {package_name}: {matches[0]}") return matches[0] -def get_package_requirements(package_name: str) -> List[str]: - """Retrieve the install requirements for a package.""" - requirements = [] - try: - package_path = get_package_path(package_name) - if not package_path: - logger.error(f"Could not find package path for {package_name}") - return requirements +def 
get_package_requirements(package_name: str) -> Tuple[List[str], List[str]]: + """Retrieve the host and run requirements for a data plane package meta.yaml.""" + host_requirements = ["python", "pip"] + run_requirements = ["python"] - config_path = os.path.join(package_path, "setup.py") + package_path = get_package_path(package_name) + if not package_path: + logger.error(f"Could not find package path for {package_name}") + return host_requirements, run_requirements - if os.path.exists(config_path): - with open(config_path, "r") as f: - setup_contents = f.read() + # get requirements from setup.py or pyproject.toml + try: + install_reqs = get_install_requires(package_path) + except ValueError as e: + logger.error(f"No setup.py or pyproject.toml found for {package_name}: {e}") + return host_requirements, run_requirements + + for req in install_reqs: + # TODO ?? is this correct behavior?????? + req_name = re.split(r"[<>=!]", req)[0].strip() + if req_name in ["azure-core", "azure-identity"]: + req_name = ( + f"{req_name} >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}" + ) - # A simple regex to find the install_requires list - match = re.search( - r"install_requires=\[(.*?)\]", setup_contents, re.DOTALL - ) - if match: - reqs_str = match.group(1) - # Split by commas and strip whitespace and quotes - requirements = [ - req.strip().strip('"').strip("'") for req in reqs_str.split(",") - ] - else: - config_path = os.path.join(package_path, "pyproject.toml") - if not os.path.exists(config_path): - logger.error(f"No setup.py or pyproject.toml found for {package_name}") - return requirements - # TODO? + host_requirements.append(req_name) + run_requirements.append(req_name) - except Exception as e: - logger.error(f"Failed to read install requirements for {package_name}: {e}") - print(f"Requirements for {package_name}: {requirements}") - return requirements + # TODO there are other requirements to consider... 
+ + return host_requirements, run_requirements def generate_data_plane_meta_yaml( @@ -366,64 +359,55 @@ def generate_data_plane_meta_yaml( pkg_name_normalized = package_name.replace("-", ".") - # TODO how to get requirements ? - reqs = get_package_requirements(package_name) - - # get about info - - meta_yaml_content = f""" - {{% set name = "{package_name}" %}} - package: - name: "{{ name|lower }}" - version: {{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} - - source: - url: {{ environ.get('{src_distribution_env_var}', '') }} - - build: - noarch: python - number: 0 - script: "{{ PYTHON }} -m pip install . -vv" - - requirements: - host: - - azure-core >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} - - azure-identity >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} - - cryptography - - pip - - python - - requests-oauthlib >=0.5.0 - - aiohttp - - isodate - run: - - azure-core >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} - - azure-identity >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }} - - cryptography - - python - - requests-oauthlib >=0.5.0 - - aiohttp - - isodate - - test: - imports: - - {pkg_name_normalized} - - about: - home: "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents" - license: MIT - license_family: MIT - license_file: - summary: "Microsoft Azure AI Agents Client Library for Python" - description: | - This is the Microsoft Azure AI Agents Client Library. - Please see https://aka.ms/azsdk/conda/releases/agents for version details. - doc_url: - dev_url: - - extra: - recipe-maintainers: - - xiangyan99 - """ + host_reqs, run_reqs = get_package_requirements(package_name) + + # Format requirements with proper YAML indentation + host_reqs_str = "\n - ".join(host_reqs) + run_reqs_str = "\n - ".join(run_reqs) + + # TODO ... 
check import + # TODO get about info + + meta_yaml_content = f"""{{% set name = "{package_name}" %}} + +package: + name: "{{{{ name|lower }}}}" + version: {{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}} + +source: + url: {{{{ environ.get('{src_distribution_env_var}', '') }}}} + +build: + noarch: python + number: 0 + script: "{{{{ PYTHON }}}} -m pip install . -vv" + +requirements: + host: + - {host_reqs_str} + run: + - {run_reqs_str} + +test: + imports: + - {pkg_name_normalized} + +about: + home: "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents" + license: MIT + license_family: MIT + license_file: + summary: "Microsoft Azure AI Agents Client Library for Python" + description: | + This is the Microsoft Azure AI Agents Client Library. + Please see https://aka.ms/azsdk/conda/releases/agents for version details. + doc_url: + dev_url: + +extra: + recipe-maintainers: + - xiangyan99 +""" print(meta_yaml_content) return meta_yaml_content From d58bc3c98b55e25551a56ce9a90aa7b4776daf3b Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 13:57:36 -0800 Subject: [PATCH 014/112] minor improvements --- conda/update_conda_files.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 8275e82ded18..70a97d7ef5ba 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -334,10 +334,14 @@ def get_package_requirements(package_name: str) -> Tuple[List[str], List[str]]: for req in install_reqs: # TODO ?? is this correct behavior?????? 
- req_name = re.split(r"[<>=!]", req)[0].strip() - if req_name in ["azure-core", "azure-identity"]: + req_name = req + name_unpinned = re.split(r"[>=={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}" + f"{name_unpinned} >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}" ) host_requirements.append(req_name) @@ -357,6 +361,7 @@ def generate_data_plane_meta_yaml( src_distr_name = package_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" + # TODO there can be subdirectory packages..... e.g. azure-ai-ml pkg_name_normalized = package_name.replace("-", ".") host_reqs, run_reqs = get_package_requirements(package_name) @@ -408,7 +413,6 @@ def generate_data_plane_meta_yaml( recipe-maintainers: - xiangyan99 """ - print(meta_yaml_content) return meta_yaml_content From 4d92d664514b0e994795f89b65fffb7eb713302e Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 15:58:30 -0800 Subject: [PATCH 015/112] more requirement fetching logic refinement --- conda/update_conda_files.py | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 70a97d7ef5ba..875137789fa6 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -315,6 +315,17 @@ def get_package_path(package_name: str) -> Optional[str]: return matches[0] +def format_requirement(req: str) -> str: + """Format a requirement string for conda meta.yaml.""" + name_unpinned = re.split(r"[>=={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}" + return req + + def get_package_requirements(package_name: str) -> Tuple[List[str], List[str]]: """Retrieve the host and run requirements for a data plane package meta.yaml.""" host_requirements = ["python", "pip"] @@ -334,19 +345,24 @@ def get_package_requirements(package_name: str) -> Tuple[List[str], List[str]]: for req in install_reqs: # TODO ?? 
is this correct behavior?????? - req_name = req - name_unpinned = re.split(r"[>=={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}" - ) - + req_name = format_requirement(req) host_requirements.append(req_name) run_requirements.append(req_name) + # make sure essential reqs are added if they weren't in install_reqs + # TODO finalize actual list of essentials + for essential_req in [ + "azure-identity", + "azure-core", + "aiohttp", + "requests-oauthlib >=0.5.0", + "cryptography", + ]: + req_name = format_requirement(essential_req) + if req_name not in host_requirements: + host_requirements.append(req_name) + run_requirements.append(req_name) + # TODO there are other requirements to consider... return host_requirements, run_requirements From 20f3d38133e441a72d3e774112d1d6f7bd316b0d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 16:05:59 -0800 Subject: [PATCH 016/112] minor --- conda/update_conda_files.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 875137789fa6..2afc37f015c1 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -291,6 +291,10 @@ def update_package_versions( f"Updated download_uri for {pkg_name} with version {latest_version}: {download_uri}" ) updated_count += 1 + else: + logger.warning( + f"Package {pkg_name} not found in conda-sdk-client.yml, skipping download_uri update" + ) if updated_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: @@ -309,6 +313,7 @@ def update_package_versions( def get_package_path(package_name: str) -> Optional[str]: + """Get the filesystem path of an SDK package given its name.""" pattern = os.path.join(SDK_DIR, "**", package_name) matches = glob.glob(pattern, recursive=True) if matches: @@ -322,7 +327,7 @@ def format_requirement(req: str) -> str: # TODO idk if this is right, certain reqs never seem to have pinned versions like aiohttp or 
isodate if name_unpinned.startswith("azure-") or name_unpinned in ["msrest"]: - return f"{name_unpinned} >={{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}" + return f"{name_unpinned} >={{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}" return req From 539044831c4de42e6b48e013bac367e2990d3733 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 17:04:37 -0800 Subject: [PATCH 017/112] refine metadata extraction --- conda/update_conda_files.py | 114 +++++++++++++++++++++++------------- 1 file changed, 74 insertions(+), 40 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 2afc37f015c1..f2b8daec4964 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -312,12 +312,11 @@ def update_package_versions( logger.warning("No packages were found in the YAML file to update") -def get_package_path(package_name: str) -> Optional[str]: +def get_package_path(package_name: str) -> str: """Get the filesystem path of an SDK package given its name.""" pattern = os.path.join(SDK_DIR, "**", package_name) matches = glob.glob(pattern, recursive=True) - if matches: - return matches[0] + return matches[0] def format_requirement(req: str) -> str: @@ -331,46 +330,70 @@ def format_requirement(req: str) -> str: return req -def get_package_requirements(package_name: str) -> Tuple[List[str], List[str]]: +def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]]: """Retrieve the host and run requirements for a data plane package meta.yaml.""" - host_requirements = ["python", "pip"] - run_requirements = ["python"] + host_requirements = set(["pip"]) + run_requirements = set() - package_path = get_package_path(package_name) - if not package_path: - logger.error(f"Could not find package path for {package_name}") - return host_requirements, run_requirements - - # get requirements from setup.py or pyproject.toml - try: - install_reqs = 
get_install_requires(package_path) - except ValueError as e: - logger.error(f"No setup.py or pyproject.toml found for {package_name}: {e}") - return host_requirements, run_requirements - - for req in install_reqs: - # TODO ?? is this correct behavior?????? - req_name = format_requirement(req) - host_requirements.append(req_name) - run_requirements.append(req_name) - - # make sure essential reqs are added if they weren't in install_reqs - # TODO finalize actual list of essentials + # TODO finalize actual list of essentials, this is more of a placeholder with reqs idk how to find dynamically for essential_req in [ "azure-identity", "azure-core", + "python", "aiohttp", "requests-oauthlib >=0.5.0", "cryptography", ]: req_name = format_requirement(essential_req) - if req_name not in host_requirements: - host_requirements.append(req_name) - run_requirements.append(req_name) + host_requirements.add(req_name) + run_requirements.add(req_name) + + package_path = get_package_path(parsed.name) + if not package_path: + logger.error(f"Could not find package path for {parsed.name}") + return list(host_requirements), list(run_requirements) + + # get requirements from setup.py or pyproject.toml + install_reqs = parsed.requires + + for req in install_reqs: + req_name = format_requirement(req) + host_requirements.add(req_name) + run_requirements.add(req_name) # TODO there are other requirements to consider... 
- return host_requirements, run_requirements + return list(host_requirements), list(run_requirements) + + +def get_package_metadata(parsed: ParsedSetup, package_path: str) -> Dict[str, str]: + """Extract metadata for the about section from package.""" + package_name = parsed.name + service_dir = os.path.basename(os.path.dirname(package_path)) + + home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" + + # Get summary from package metadata or construct default + summary = parsed.classifiers + if summary and any("Description" in c for c in summary): + summary = next( + (c.split("::")[-1].strip() for c in summary if "Description" in c), + f"Microsoft Azure {package_name.replace('azure-', '').title()} Client Library for Python", + ) + else: + # Construct from package name + pkg_display_name = package_name.replace("azure-", "").replace("-", " ").title() + summary = f"Microsoft Azure {pkg_display_name} Client Library for Python" + + # Conda release notes URL - use service shortname + service_shortname = service_dir.replace("-", "") + conda_url = f"https://aka.ms/azsdk/conda/releases/{service_shortname}" + + return { + "home": home_url, + "summary": summary, + "description": f"This is the {summary}.\n Please see {conda_url} for version details.", + } def generate_data_plane_meta_yaml( @@ -382,17 +405,22 @@ def generate_data_plane_meta_yaml( src_distr_name = package_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" - # TODO there can be subdirectory packages..... e.g. 
azure-ai-ml - pkg_name_normalized = package_name.replace("-", ".") + package_path = get_package_path(package_name) + + # get parsed setup info to extract requirements and metadata + parsed_setup = ParsedSetup.from_path(package_path) - host_reqs, run_reqs = get_package_requirements(package_name) + host_reqs, run_reqs = get_package_requirements(parsed_setup) # Format requirements with proper YAML indentation host_reqs_str = "\n - ".join(host_reqs) run_reqs_str = "\n - ".join(run_reqs) - # TODO ... check import - # TODO get about info + # TODO there can be subdirectory packages..... e.g. azure-ai-ml, may need more import logic + pkg_name_normalized = package_name.replace("-", ".") + + # Get package metadata for about section + metadata = get_package_metadata(parsed_setup, package_path) meta_yaml_content = f"""{{% set name = "{package_name}" %}} @@ -419,14 +447,13 @@ def generate_data_plane_meta_yaml( - {pkg_name_normalized} about: - home: "https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ai/azure-ai-agents" + home: "{metadata['home']}" license: MIT license_family: MIT license_file: - summary: "Microsoft Azure AI Agents Client Library for Python" + summary: "{metadata['summary']}" description: | - This is the Microsoft Azure AI Agents Client Library. - Please see https://aka.ms/azsdk/conda/releases/agents for version details. 
+ {metadata['description']} doc_url: dev_url: @@ -452,7 +479,14 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) - meta_yml = generate_data_plane_meta_yaml(package_name) + try: + # TODO maybe compile list of failed packages to report at end + meta_yml = generate_data_plane_meta_yaml(package_name) + except Exception as e: + logger.error( + f"Failed to generate meta.yaml content for {package_name} and skipping, error: {e}" + ) + continue try: with open(pkg_yaml_path, "w") as f: From 143e9d229665b88a020f9bd15a400bdaf43e2d0f Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 17:17:35 -0800 Subject: [PATCH 018/112] about section handling improvements --- conda/update_conda_files.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index f2b8daec4964..24650826450f 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -373,21 +373,11 @@ def get_package_metadata(parsed: ParsedSetup, package_path: str) -> Dict[str, st home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" - # Get summary from package metadata or construct default - summary = parsed.classifiers - if summary and any("Description" in c for c in summary): - summary = next( - (c.split("::")[-1].strip() for c in summary if "Description" in c), - f"Microsoft Azure {package_name.replace('azure-', '').title()} Client Library for Python", - ) - else: - # Construct from package name - pkg_display_name = package_name.replace("azure-", "").replace("-", " ").title() - summary = f"Microsoft Azure {pkg_display_name} Client Library for Python" + pkg_display_name = package_name.replace("azure-", "").replace("-", " ").title() + summary = 
f"Microsoft Azure {pkg_display_name} Client Library for Python" - # Conda release notes URL - use service shortname - service_shortname = service_dir.replace("-", "") - conda_url = f"https://aka.ms/azsdk/conda/releases/{service_shortname}" + # TODO definitely need to check if this is actually correct + conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" return { "home": home_url, From 755ab5e7eb3fe357056b499d8746ab3d4d212146 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 30 Dec 2025 17:36:20 -0800 Subject: [PATCH 019/112] use metadata from parsedsetup --- conda/update_conda_files.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 24650826450f..141be690a810 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -11,7 +11,7 @@ from datetime import datetime, timedelta from dateutil.relativedelta import relativedelta from ci_tools.logging import logger, configure_logging -from ci_tools.parsing import get_install_requires, ParsedSetup +from ci_tools.parsing import ParsedSetup, extract_package_metadata from typing import Dict, List, Optional, Tuple # paths @@ -366,15 +366,19 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] return list(host_requirements), list(run_requirements) -def get_package_metadata(parsed: ParsedSetup, package_path: str) -> Dict[str, str]: +def get_package_metadata(package_name: str, package_path: str) -> Dict[str, str]: """Extract metadata for the about section from package.""" - package_name = parsed.name + metadata = extract_package_metadata(package_path) + service_dir = os.path.basename(os.path.dirname(package_path)) home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" - pkg_display_name = package_name.replace("azure-", "").replace("-", " ").title() - summary = f"Microsoft Azure 
{pkg_display_name} Client Library for Python" + # TODO check this + if metadata and metadata.get("description"): + summary = metadata["description"] + else: + summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" # TODO definitely need to check if this is actually correct conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" @@ -397,9 +401,7 @@ def generate_data_plane_meta_yaml( package_path = get_package_path(package_name) - # get parsed setup info to extract requirements and metadata parsed_setup = ParsedSetup.from_path(package_path) - host_reqs, run_reqs = get_package_requirements(parsed_setup) # Format requirements with proper YAML indentation @@ -410,7 +412,7 @@ def generate_data_plane_meta_yaml( pkg_name_normalized = package_name.replace("-", ".") # Get package metadata for about section - metadata = get_package_metadata(parsed_setup, package_path) + metadata = get_package_metadata(package_name, package_path) meta_yaml_content = f"""{{% set name = "{package_name}" %}} From 95f482380ff8b8dff92a567ae755812a42ea0aee Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 31 Dec 2025 11:04:32 -0800 Subject: [PATCH 020/112] preserve indent in conda-sdk-client --- conda/update_conda_files.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 141be690a810..4bc699e1ef60 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -210,6 +210,11 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int return package_index +class IndentDumper(yaml.SafeDumper): + def increase_indent(self, flow=False, indentless=False): + return super().increase_indent(flow, False) + + def update_package_versions( packages: List[Dict[str, str]], prev_release_date: str ) -> None: @@ -296,13 +301,18 @@ def update_package_versions( f"Package {pkg_name} not 
found in conda-sdk-client.yml, skipping download_uri update" ) + # TODO note this dump doesn't preserve some quotes like + # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary? + if updated_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: yaml.dump( conda_client_data, file, + Dumper=IndentDumper, default_flow_style=False, sort_keys=False, + indent=2, width=float("inf"), ) logger.info( From ddd9f265b342871c0ba4b4175c0e80b8b5417608 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 31 Dec 2025 12:56:06 -0800 Subject: [PATCH 021/112] lots of refactoring to update conda-sdk-client --- conda/update_conda_files.py | 140 ++++++++++++++++++++++++++---------- 1 file changed, 103 insertions(+), 37 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 4bc699e1ef60..45888fe519fd 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -24,6 +24,7 @@ CONDA_CLIENT_YAML_PATH = os.path.join( ROOT_DIR, "eng", "pipelines", "templates", "stages", "conda-sdk-client.yml" ) +CONDA_MGMT_META_YAML_PATH = os.path.join(CONDA_RECIPES_DIR, "azure-mgmt", "meta.yaml") # constants RELEASE_PERIOD_MONTHS = 3 @@ -211,47 +212,44 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int class IndentDumper(yaml.SafeDumper): + """Used to preserve indentation levels in conda-sdk-client.yml.""" + def increase_indent(self, flow=False, indentless=False): return super().increase_indent(flow, False) -def update_package_versions( - packages: List[Dict[str, str]], prev_release_date: str +def update_conda_sdk_client_yml( + packages_to_update: List[Dict[str, str]], + new_data_plane_packages: List[Dict[str, str]], + new_mgmt_plane_packages: List[Dict[str, str]], ) -> None: """ - Update outdated package versions in the conda-sdk-client.yml file + Update outdated package versions and add new entries in conda-sdk-client.yml file - :param 
packages: List of package rows from the CSV. - :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. + :param packages_to_update: List of package rows from the CSV that need updates. + :param new_data_plane_packages: List of new data plane package rows from the CSV. + :param new_mgmt_plane_packages: List of new management plane package rows from the CSV. """ - packages_to_update = [] - - for package in packages: - if package_needs_update(package, prev_release_date, is_new=False): - packages_to_update.append( - (package.get(PACKAGE_COL), package.get(VERSION_GA_COL)) - ) - - if not packages_to_update: - logger.info("No packages need version updates") - return - - logger.info( - f"Detected {len(packages_to_update)} outdated package versions to update" - ) + updated_count = 0 + added_count = 0 with open(CONDA_CLIENT_YAML_PATH, "r") as file: conda_client_data = yaml.safe_load(file) - updated_count = 0 - - # Navigate to the CondaArtifacts section conda_artifacts = conda_client_data["extends"]["parameters"]["stages"][0]["jobs"][ 0 ]["steps"][0]["parameters"]["CondaArtifacts"] + + # update outdated package versions + + logger.info( + f"Detected {len(packages_to_update)} outdated package versions to update in conda-sdk-client.yml" + ) package_index = build_package_index(conda_artifacts) - for pkg_name, new_version in packages_to_update: + for pkg in packages_to_update: + pkg_name = pkg.get(PACKAGE_COL) + new_version = pkg.get(VERSION_GA_COL) if pkg_name in package_index: artifact_idx, checkout_idx = package_index[pkg_name] checkout_item = conda_artifacts[artifact_idx]["checkout"][checkout_idx] @@ -301,10 +299,67 @@ def update_package_versions( f"Package {pkg_name} not found in conda-sdk-client.yml, skipping download_uri update" ) + # add new data plane packages + + logger.info( + f"Detected {len(new_data_plane_packages)} new data plane packages to add to conda-sdk-client.yml" + ) + + parameters = conda_client_data["parameters"] + + for pkg in 
new_data_plane_packages: + package_name = pkg.get(PACKAGE_COL) + + if not package_name: + logger.warning("Skipping package with missing name") + continue + + # TODO commented out for testing purposes only + # if package_name in package_index: + # logger.warning( + # f"Package {package_name} already exists in conda-sdk-client.yml, skipping addition" + # ) + # continue + + release_name = f"release_{package_name.replace('-', '_')}" + new_parameter = { + "name": release_name, + "displayName": package_name, + "type": "boolean", + "default": True, + } + + parameters.append(new_parameter) + + # also add to CondaArtifacts + # TODO need to determine how to correctly get the service name + service_name = package_name.replace("azure-", "").replace("-", "") + # TODO how to determine common root + new_artifact_entry = { + "name": package_name, + "common_root": "azure", + "service": service_name, + "in_batch": f"${{{{ parameters.{release_name} }}}}", + "checkout": [{"package": package_name, "version": pkg.get(VERSION_GA_COL)}], + } + + conda_artifacts.append(new_artifact_entry) + + added_count += 1 + logger.info(f"Added new data plane package: {package_name}") + + # add new mgmt plane packages + + logger.info( + f"Detected {len(new_mgmt_plane_packages)} new management plane packages to add to conda-sdk-client.yml" + ) + + # TODO + # TODO note this dump doesn't preserve some quotes like # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary? 
- if updated_count > 0: + if updated_count > 0 or added_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: yaml.dump( conda_client_data, @@ -468,6 +523,9 @@ def generate_data_plane_meta_yaml( def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: """Create meta.yaml files for new data plane packages and add import tests.""" + if len(new_packages) == 0: + return + logger.info(f"Adding {len(new_packages)} new data plane packages") for pkg in new_packages: @@ -476,7 +534,7 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: logger.warning("Skipping package with missing name") continue - logger.info(f"Adding new data plane package: {package_name}") + logger.info(f"Adding new data plane meta.yaml for: {package_name}") pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) @@ -497,12 +555,18 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: except Exception as e: logger.error(f"Failed to create meta.yaml for {package_name}: {e}") + # TODO AKA link stuff needs to happen, either do it or return packages that need action + def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" - # TODO implement logic to add new management plane packages + if len(new_packages) == 0: + return logger.info(f"Adding {len(new_packages)} new management plane packages") + # with open(CONDA_MGMT_META_YAML_PATH, "r") as file: + # mgmt_meta_data = yaml.safe_load(file) + if __name__ == "__main__": parser = argparse.ArgumentParser( @@ -510,7 +574,7 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: ) args = parser.parse_args() - args.debug = True + args.debug = True # TODO remove this configure_logging(args) old_date, new_version = update_conda_version() @@ -529,11 +593,9 @@ def 
add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: packages = [pkg for pkg in packages if pkg.get(VERSION_GA_COL)] logger.info(f"Filtered to {len(packages)} GA packages") - # update existing package versions - # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... - update_package_versions(packages, old_version) - - # handle new packages + outdated_packages = [ + pkg for pkg in packages if package_needs_update(pkg, old_version, is_new=False) + ] new_packages = [ pkg for pkg in packages if package_needs_update(pkg, old_version, is_new=True) ] @@ -541,13 +603,17 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: new_packages ) + # update conda-sdk-client.yml + # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... + update_conda_sdk_client_yml( + outdated_packages, new_data_plane_packages, new_mgmt_plane_packages + ) + # handle new data plane libraries - if len(new_data_plane_packages) > 0: - add_new_data_plane_packages(new_data_plane_packages) + add_new_data_plane_packages(new_data_plane_packages) # handle new mgmt plane libraries - # if len(new_mgmt_plane_packages) > 0: - # add_new_mgmt_plane_packages(new_mgmt_plane_packages) + add_new_mgmt_plane_packages(new_mgmt_plane_packages) # update conda-sdk-client From 2809bce46c00116782d3b8764a6dcbd701222120 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 31 Dec 2025 13:32:53 -0800 Subject: [PATCH 022/112] minor service name improvements --- conda/update_conda_files.py | 56 +++++++++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 45888fe519fd..c378cdedbcff 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -332,18 +332,18 @@ def update_conda_sdk_client_yml( parameters.append(new_parameter) # also add to 
CondaArtifacts - # TODO need to determine how to correctly get the service name - service_name = package_name.replace("azure-", "").replace("-", "") - # TODO how to determine common root + common_root, service_name = determine_service_info(package_name) + new_artifact_entry = { "name": package_name, - "common_root": "azure", + "common_root": common_root, "service": service_name, "in_batch": f"${{{{ parameters.{release_name} }}}}", "checkout": [{"package": package_name, "version": pkg.get(VERSION_GA_COL)}], } - conda_artifacts.append(new_artifact_entry) + # append before azure-mgmt entry + conda_artifacts.insert(len(conda_artifacts) - 1, new_artifact_entry) added_count += 1 logger.info(f"Added new data plane package: {package_name}") @@ -356,7 +356,7 @@ def update_conda_sdk_client_yml( # TODO - # TODO note this dump doesn't preserve some quotes like + # TODO note this dump doesn't preserve some quotes like around # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary? if updated_count > 0 or added_count > 0: @@ -384,6 +384,48 @@ def get_package_path(package_name: str) -> str: return matches[0] +def determine_service_info(package_name: str) -> Tuple[str, str]: + # TODO how to actually determine, this is mostly placeholder + """ + Dynamically determine the common_root and service name based on package name and directory structure. + + :param package_name: The name of the package (e.g., "azure-ai-textanalytics"). + Returns: + Tuple of (common_root, service_name) + """ + # Multi-package services that should have common_root like "azure/servicename" + multi_package_services = { + "azure-storage", + "azure-communication", + "azure-keyvault", + "azure-eventhub", + "azure-schemaregistry", + "azure-ai-vision", + } + + # TODO not all existing packages follow this pattern tho, + # e.g. azure-ai-metricsadvisor has service cognitivelanguage <- does this one even exist anymore? + # e.g. 
azure-ai-translation-text has service translation + # e.g. azure-digitaltwins-core has service digitaltwins + # e.g. azure-monitor-ingestion has service monitor + + # and some packages don't have a common_root field at all?? + + # TODO idk how to properly get the service name, e.g. azure-ai-voicelive is projects? + service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) + print("!!!") + print(service_name) + + # Determine common_root + if package_name in multi_package_services: + base_service = package_name[6:] # Remove "azure-" prefix + common_root = f"azure/{base_service}" + else: + common_root = "azure" + + return common_root, service_name + + def format_requirement(req: str) -> str: """Format a requirement string for conda meta.yaml.""" name_unpinned = re.split(r"[>= None: # handle new mgmt plane libraries add_new_mgmt_plane_packages(new_mgmt_plane_packages) - # update conda-sdk-client - # add/update release logs From 88c4957d46e148cb0fdf9a6faf46e9d1d5abd68b Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 31 Dec 2025 14:49:29 -0800 Subject: [PATCH 023/112] minor --- conda/update_conda_files.py | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index c378cdedbcff..680dbbaccbb9 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -317,7 +317,7 @@ def update_conda_sdk_client_yml( # TODO commented out for testing purposes only # if package_name in package_index: # logger.warning( - # f"Package {package_name} already exists in conda-sdk-client.yml, skipping addition" + # f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" # ) # continue @@ -390,19 +390,7 @@ def determine_service_info(package_name: str) -> Tuple[str, str]: Dynamically determine the common_root and service name based on package name and directory structure. 
:param package_name: The name of the package (e.g., "azure-ai-textanalytics"). - Returns: - Tuple of (common_root, service_name) """ - # Multi-package services that should have common_root like "azure/servicename" - multi_package_services = { - "azure-storage", - "azure-communication", - "azure-keyvault", - "azure-eventhub", - "azure-schemaregistry", - "azure-ai-vision", - } - # TODO not all existing packages follow this pattern tho, # e.g. azure-ai-metricsadvisor has service cognitivelanguage <- does this one even exist anymore? # e.g. azure-ai-translation-text has service translation @@ -411,17 +399,14 @@ def determine_service_info(package_name: str) -> Tuple[str, str]: # and some packages don't have a common_root field at all?? + # services with a shared common root include + # "azure-ai", "azure-mgmt", "azure-storage", "azure-communication", etc. + # TODO idk how to properly get the service name, e.g. azure-ai-voicelive is projects? service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) - print("!!!") - print(service_name) - # Determine common_root - if package_name in multi_package_services: - base_service = package_name[6:] # Remove "azure-" prefix - common_root = f"azure/{base_service}" - else: - common_root = "azure" + # TODO Determine common_root + common_root = "azure" return common_root, service_name @@ -658,3 +643,5 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: add_new_mgmt_plane_packages(new_mgmt_plane_packages) # add/update release logs + + # print a final report of changes made From c11d5ab0ad70d3e9712649e2e1526ca425df73b3 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 5 Jan 2026 11:03:39 -0800 Subject: [PATCH 024/112] basic logic for adding new mgmt packages --- conda/update_conda_files.py | 82 +++++++++++++++++++++++++++++++++---- 1 file changed, 74 insertions(+), 8 deletions(-) diff --git a/conda/update_conda_files.py 
b/conda/update_conda_files.py index 680dbbaccbb9..29f1b5b4608a 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -321,6 +321,7 @@ def update_conda_sdk_client_yml( # ) # continue + # TODO what is the case where we batch multiple subservices under one??? release_name = f"release_{package_name.replace('-', '_')}" new_parameter = { "name": release_name, @@ -358,6 +359,7 @@ def update_conda_sdk_client_yml( # TODO note this dump doesn't preserve some quotes like around # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary? + # double check that this is ok, esp for URLs... ^ if updated_count > 0 or added_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: @@ -385,7 +387,7 @@ def get_package_path(package_name: str) -> str: def determine_service_info(package_name: str) -> Tuple[str, str]: - # TODO how to actually determine, this is mostly placeholder + # TODO how to actually determine?, this is mostly placeholder """ Dynamically determine the common_root and service name based on package name and directory structure. @@ -399,7 +401,7 @@ def determine_service_info(package_name: str) -> Tuple[str, str]: # and some packages don't have a common_root field at all?? - # services with a shared common root include + # services with a shared common root include # "azure-ai", "azure-mgmt", "azure-storage", "azure-communication", etc. # TODO idk how to properly get the service name, e.g. azure-ai-voicelive is projects? 
@@ -591,8 +593,72 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: return logger.info(f"Adding {len(new_packages)} new management plane packages") - # with open(CONDA_MGMT_META_YAML_PATH, "r") as file: - # mgmt_meta_data = yaml.safe_load(file) + # can't use pyyaml due to jinja2 + with open(CONDA_MGMT_META_YAML_PATH, "r") as file: + content = file.read() + + test_match = re.search( + r"^test:\s*\n\s*imports:(.*?)^(?=\w)", content, re.MULTILINE | re.DOTALL + ) + if not test_match: + logger.error("Could not find 'test: imports:' section in meta.yaml") + return + + existing_imports_text = test_match.group(1) + existing_imports = [ + line.strip() + for line in existing_imports_text.strip().split("\n") + if line.strip().startswith("-") + ] + + new_imports = [] + for pkg in new_packages: + package_name = pkg.get(PACKAGE_COL) + if not package_name: + logger.warning("Skipping package with missing name") + continue + + # TODO there are some existing packages that have hyphens instead lf . 
which must be wrong + # ^ should manually edit these before running this script coz it messes with sort + + # convert package name to module name (e.g., azure-mgmt-advisor -> azure.mgmt.advisor) + module_name = package_name.replace("-", ".") + + # Standard import patterns for mgmt packages + imports = [ + f"- {module_name}", + f"- {module_name}.aio", + f"- {module_name}.aio.operations", + f"- {module_name}.models", + f"- {module_name}.operations", + ] + + new_imports.extend(imports) + logger.info(f"Generated import statements for {package_name}") + + all_imports = list(set(existing_imports + new_imports)) + + # sort alphabetically + all_imports.sort() + + # format imports with proper indentation + formatted_imports = "\n".join(f" {imp}" for imp in all_imports) + + # replace the imports section + new_imports_section = f"test:\n imports:\n{formatted_imports}\n\n" + updated_content = re.sub( + r"^test:\s*\n\s*imports:.*?^(?=\w)", + new_imports_section, + content, + flags=re.MULTILINE | re.DOTALL, + ) + + with open(CONDA_MGMT_META_YAML_PATH, "w") as file: + file.write(updated_content) + + logger.info( + f"Added {len(new_packages)} new management plane packages to meta.yaml in alphabetical order" + ) if __name__ == "__main__": @@ -632,12 +698,12 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
- update_conda_sdk_client_yml( - outdated_packages, new_data_plane_packages, new_mgmt_plane_packages - ) + # update_conda_sdk_client_yml( + # outdated_packages, new_data_plane_packages, new_mgmt_plane_packages + # ) # handle new data plane libraries - add_new_data_plane_packages(new_data_plane_packages) + # add_new_data_plane_packages(new_data_plane_packages) # handle new mgmt plane libraries add_new_mgmt_plane_packages(new_mgmt_plane_packages) From 88ee5f8b0a958d04fa2bd1d5cf75225df76be869 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 5 Jan 2026 11:19:32 -0800 Subject: [PATCH 025/112] minor --- conda/update_conda_files.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 29f1b5b4608a..58b9cd33a7a5 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -621,7 +621,6 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: # TODO there are some existing packages that have hyphens instead lf . which must be wrong # ^ should manually edit these before running this script coz it messes with sort - # convert package name to module name (e.g., azure-mgmt-advisor -> azure.mgmt.advisor) module_name = package_name.replace("-", ".") # Standard import patterns for mgmt packages @@ -698,12 +697,12 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
- # update_conda_sdk_client_yml( - # outdated_packages, new_data_plane_packages, new_mgmt_plane_packages - # ) + update_conda_sdk_client_yml( + outdated_packages, new_data_plane_packages, new_mgmt_plane_packages + ) # handle new data plane libraries - # add_new_data_plane_packages(new_data_plane_packages) + add_new_data_plane_packages(new_data_plane_packages) # handle new mgmt plane libraries add_new_mgmt_plane_packages(new_mgmt_plane_packages) From a12ca0bb287d9931a9312b54b0010a08628f55d4 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 5 Jan 2026 14:41:11 -0800 Subject: [PATCH 026/112] properly add new mgmt entries in conda sdk client --- conda/update_conda_files.py | 108 +++++++++++++++++++++++++++++------- 1 file changed, 88 insertions(+), 20 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 58b9cd33a7a5..831c2a1a3531 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -118,7 +118,7 @@ def separate_packages_by_type( else: data_plane_packages.append(pkg) - logger.info( + logger.debug( f"Separated {len(data_plane_packages)} data plane and {len(mgmt_plane_packages)} management plane packages" ) @@ -202,6 +202,7 @@ def get_package_data_from_pypi( def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml.""" package_index = {} + for artifact_idx, artifact in enumerate(conda_artifacts): if "checkout" in artifact: for checkout_idx, checkout_item in enumerate(artifact["checkout"]): @@ -222,16 +223,18 @@ def update_conda_sdk_client_yml( packages_to_update: List[Dict[str, str]], new_data_plane_packages: List[Dict[str, str]], new_mgmt_plane_packages: List[Dict[str, str]], -) -> None: +) -> List[str]: """ Update outdated package versions and add new entries in conda-sdk-client.yml file :param packages_to_update: List of package 
rows from the CSV that need updates. :param new_data_plane_packages: List of new data plane package rows from the CSV. :param new_mgmt_plane_packages: List of new management plane package rows from the CSV. + :return: List of package names that were not updated or added and may require manual action. """ updated_count = 0 added_count = 0 + result = [] with open(CONDA_CLIENT_YAML_PATH, "r") as file: conda_client_data = yaml.safe_load(file) @@ -263,10 +266,12 @@ def update_conda_sdk_client_yml( logger.warning( f"Package {pkg_name} has no 'version' field, skipping update" ) + result.append(pkg_name) else: logger.warning( f"Package {pkg_name} not found in conda-sdk-client.yml, skipping update" ) + result.append(pkg_name) # handle download_uri for packages known to be missing from the csv for pkg_name in PACKAGES_WITH_DOWNLOAD_URI: @@ -281,6 +286,7 @@ def update_conda_sdk_client_yml( logger.warning( f"Could not retrieve latest version or download URI for {pkg_name} from PyPI, skipping" ) + result.append(pkg_name) continue if curr_download_uri != download_uri: @@ -298,8 +304,9 @@ def update_conda_sdk_client_yml( logger.warning( f"Package {pkg_name} not found in conda-sdk-client.yml, skipping download_uri update" ) + result.append(pkg_name) - # add new data plane packages + # Add new data plane packages logger.info( f"Detected {len(new_data_plane_packages)} new data plane packages to add to conda-sdk-client.yml" @@ -319,6 +326,7 @@ def update_conda_sdk_client_yml( # logger.warning( # f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" # ) + # result.append(package_name) # continue # TODO what is the case where we batch multiple subservices under one??? 
@@ -349,13 +357,42 @@ def update_conda_sdk_client_yml( added_count += 1 logger.info(f"Added new data plane package: {package_name}") - # add new mgmt plane packages + # Add new mgmt plane packages logger.info( f"Detected {len(new_mgmt_plane_packages)} new management plane packages to add to conda-sdk-client.yml" ) - # TODO + # assumes azure-mgmt will always be the last CondaArtifacts entry + azure_mgmt_artifact_checkout = conda_artifacts[-1]["checkout"] + + for pkg in new_mgmt_plane_packages: + package_name = pkg.get(PACKAGE_COL) + + if not package_name: + logger.warning("Skipping package with missing name") + continue + + # TODO commented out for testing purposes only + # if package_name in package_index: + # logger.warning( + # f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" + # ) + # result.append(package_name) + # continue + + new_mgmt_entry = { + "package": package_name, + "version": pkg.get(VERSION_GA_COL), + } + + azure_mgmt_artifact_checkout.append(new_mgmt_entry) + + added_count += 1 + logger.info(f"Added new management plane package: {package_name}") + + # sort mgmt packages alphabetically + azure_mgmt_artifact_checkout.sort(key=lambda x: x["package"]) # TODO note this dump doesn't preserve some quotes like around # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary? 
@@ -377,6 +414,7 @@ def update_conda_sdk_client_yml( ) else: logger.warning("No packages were found in the YAML file to update") + return result def get_package_path(package_name: str) -> str: @@ -550,12 +588,13 @@ def generate_data_plane_meta_yaml( return meta_yaml_content -def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: +def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: """Create meta.yaml files for new data plane packages and add import tests.""" if len(new_packages) == 0: - return + return [] logger.info(f"Adding {len(new_packages)} new data plane packages") + result = [] for pkg in new_packages: package_name = pkg.get(PACKAGE_COL) @@ -569,12 +608,12 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) try: - # TODO maybe compile list of failed packages to report at end meta_yml = generate_data_plane_meta_yaml(package_name) except Exception as e: logger.error( f"Failed to generate meta.yaml content for {package_name} and skipping, error: {e}" ) + result.append(package_name) continue try: @@ -583,15 +622,18 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> None: logger.info(f"Created meta.yaml for {package_name} at {pkg_yaml_path}") except Exception as e: logger.error(f"Failed to create meta.yaml for {package_name}: {e}") + result.append(package_name) # TODO AKA link stuff needs to happen, either do it or return packages that need action + return result -def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: +def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" if len(new_packages) == 0: - return + return [] logger.info(f"Adding {len(new_packages)} new management plane packages") + result = [] # can't use pyyaml due to jinja2 with 
open(CONDA_MGMT_META_YAML_PATH, "r") as file: @@ -602,7 +644,8 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: ) if not test_match: logger.error("Could not find 'test: imports:' section in meta.yaml") - return + result.extend([pkg.get(PACKAGE_COL) for pkg in new_packages]) + return result existing_imports_text = test_match.group(1) existing_imports = [ @@ -652,12 +695,15 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: flags=re.MULTILINE | re.DOTALL, ) - with open(CONDA_MGMT_META_YAML_PATH, "w") as file: - file.write(updated_content) + try: + with open(CONDA_MGMT_META_YAML_PATH, "w") as file: + file.write(updated_content) + except Exception as e: + logger.error(f"Failed to update {CONDA_MGMT_META_YAML_PATH}: {e}") + result.extend([pkg.get(PACKAGE_COL) for pkg in new_packages]) - logger.info( - f"Added {len(new_packages)} new management plane packages to meta.yaml in alphabetical order" - ) + logger.info(f"Added {len(new_packages)} new management plane packages to meta.yaml") + return result if __name__ == "__main__": @@ -697,16 +743,38 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> None: # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
- update_conda_sdk_client_yml( + + conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( outdated_packages, new_data_plane_packages, new_mgmt_plane_packages ) # handle new data plane libraries - add_new_data_plane_packages(new_data_plane_packages) + new_data_plane_results = add_new_data_plane_packages(new_data_plane_packages) # handle new mgmt plane libraries - add_new_mgmt_plane_packages(new_mgmt_plane_packages) + new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) # add/update release logs - # print a final report of changes made + print("=== REPORT ===") + + if conda_sdk_client_pkgs_result: + print( + "The following packages may require manual adjustments in conda-sdk-client.yml:" + ) + for pkg_name in conda_sdk_client_pkgs_result: + print(f"- {pkg_name}") + + if new_data_plane_results: + print( + "\nThe following new data plane packages may require manual meta.yaml creation or adjustments:" + ) + for pkg_name in new_data_plane_results: + print(f"- {pkg_name}") + + if new_mgmt_plane_results: + print( + "\nThe following new management plane packages may require manual adjustments in azure-mgmt/meta.yaml:" + ) + for pkg_name in new_mgmt_plane_results: + print(f"- {pkg_name}") From 1e4cc225008d2f29c143232d3ab64e54498a6ad0 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 5 Jan 2026 17:07:48 -0800 Subject: [PATCH 027/112] initial release log script but i need to use more csv data to group packages --- conda/update_conda_files.py | 91 ++++++++++++++++++++++++++++++++++--- 1 file changed, 85 insertions(+), 6 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 831c2a1a3531..ca9f1500782d 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -52,7 +52,7 @@ def quoted_presenter(dumper, data): def update_conda_version() -> ( Tuple[datetime, str] -): # TODO do i need the new date anywhere else? 
i think i may +): """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions.""" with open(CONDA_ENV_PATH, "r") as file: @@ -624,8 +624,7 @@ def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] logger.error(f"Failed to create meta.yaml for {package_name}: {e}") result.append(package_name) - # TODO AKA link stuff needs to happen, either do it or return packages that need action - return result + return result def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: @@ -661,12 +660,11 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] logger.warning("Skipping package with missing name") continue - # TODO there are some existing packages that have hyphens instead lf . which must be wrong - # ^ should manually edit these before running this script coz it messes with sort + # TODO there are some existing packages that have hyphens instead of . which seems wrong? + # ^ should manually edit these before running this script coz it messes with alphabetical sort module_name = package_name.replace("-", ".") - # Standard import patterns for mgmt packages imports = [ f"- {module_name}", f"- {module_name}.aio", @@ -705,6 +703,79 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] logger.info(f"Added {len(new_packages)} new management plane packages to meta.yaml") return result +def update_release_logs(packages_to_update: List[Dict[str, str]], release_date: str) -> List[str]: + """Add and update release logs for conda packages.""" + result = [] + + # update mgmt release log separately + mgmt_release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") + + # update data plane release logs + for pkg in packages_to_update: + package_name = pkg.get(PACKAGE_COL) + version = pkg.get(VERSION_GA_COL) + + if not package_name: + logger.warning("Skipping package with missing name") + continue + + if not version: + 
logger.warning(f"Skipping {package_name} with missing version") + result.append(package_name) + continue + + release_log_path = os.path.join( + CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" + ) + + if not os.path.exists(release_log_path): + # Add new release log + logger.info(f"Creating new release log for: {package_name}") + + try: + title_parts = package_name.replace("azure-", "").split("-") + title = " ".join(word.title() for word in title_parts) + + content = f"# Azure {title} client library for Python (conda)\n\n" + content += f"## {release_date}\n\n" + content += "### Packages included\n\n" + + # TODO what about when there's multiple packages...e.g. azure-schemaregistry + content += f"- {package_name}-{version}\n" + + with open(release_log_path, "w") as f: + f.write(content) + logger.info(f"Created new release log for {package_name}") + except Exception as e: + logger.error(f"Failed to create release log for {package_name}: {e}") + result.append(package_name) + + else: + # Update existing release log + try: + with open(release_log_path, "r") as f: + existing_content = f.read() + + lines = existing_content.split("\n") + + new_release = f"\n## {release_date}\n\n" + new_release += "### Packages included\n\n" + new_release += f"- {package_name}-{version}\n" + + lines.insert(1, new_release) + updated_content = "\n".join(lines) + + with open(release_log_path, "w") as f: + f.write(updated_content) + + logger.info(f"Updated release log for {package_name}") + except Exception as e: + logger.error(f"Failed to update release log for {package_name}: {e}") + result.append(package_name) + + # TODO AKA link pointing to new release logs needs to happen + + return result if __name__ == "__main__": parser = argparse.ArgumentParser( @@ -755,6 +826,7 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) # add/update release logs + release_log_results = 
update_release_logs(outdated_packages + new_packages, new_version) print("=== REPORT ===") @@ -778,3 +850,10 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] ) for pkg_name in new_mgmt_plane_results: print(f"- {pkg_name}") + + if release_log_results: + print( + "\nThe following packages may require manual adjustments in release logs:" + ) + for pkg_name in release_log_results: + print(f"- {pkg_name}") From 5671be897e1850bac91d362ec8a52b9008c23d78 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 5 Jan 2026 18:06:18 -0800 Subject: [PATCH 028/112] some progress in using repopath for service --- conda/update_conda_files.py | 99 ++++++++++++++++++++++--------------- 1 file changed, 60 insertions(+), 39 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index ca9f1500782d..68e164a9a06d 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -33,6 +33,10 @@ LATEST_GA_DATE_COL = "LatestGADate" VERSION_GA_COL = "VersionGA" FIRST_GA_DATE_COL = "FirstGADate" +DISPLAY_NAME_COL = "DisplayName" +SERVICE_NAME_COL = "ServiceName" +REPO_PATH_COL = "RepoPath" +TYPE_COL = "Type" # packages that should be shipped but are known to be missing from the csv PACKAGES_WITH_DOWNLOAD_URI = [ @@ -50,9 +54,7 @@ def quoted_presenter(dumper, data): return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="'") -def update_conda_version() -> ( - Tuple[datetime, str] -): +def update_conda_version() -> Tuple[datetime, str]: """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions.""" with open(CONDA_ENV_PATH, "r") as file: @@ -98,10 +100,17 @@ def parse_csv() -> List[Dict[str, str]]: return [] -def is_mgmt_package(pkg_name: str) -> bool: - return pkg_name != "azure-mgmt-core" and ( - "mgmt" in pkg_name or "cognitiveservices" in pkg_name - ) +def is_mgmt_package(pkg: Dict[str, str]) -> bool: + pkg_name = 
pkg.get(PACKAGE_COL, "") + _type = pkg.get(TYPE_COL, "") + if _type == "mgmt": + return True + elif _type == "client": + return False + else: + return pkg_name != "azure-mgmt-core" and ( + "mgmt" in pkg_name or "cognitiveservices" in pkg_name + ) def separate_packages_by_type( @@ -112,8 +121,7 @@ def separate_packages_by_type( mgmt_plane_packages = [] for pkg in packages: - package_name = pkg.get(PACKAGE_COL, "") - if is_mgmt_package(package_name): + if is_mgmt_package(pkg): mgmt_plane_packages.append(pkg) else: data_plane_packages.append(pkg) @@ -312,6 +320,13 @@ def update_conda_sdk_client_yml( f"Detected {len(new_data_plane_packages)} new data plane packages to add to conda-sdk-client.yml" ) + # TODO when do we batch packages together that have the same root repoPath?? + # e.g. 'core' encompasses azure-core and azure-common: + # there's only 1 parameter release_azure_core, and those packages are grouped under the same checkout + + # however, 'ai' packages have multiple params + # e.g. azure-ai-agents and azure-ai-projects are separated + parameters = conda_client_data["parameters"] for pkg in new_data_plane_packages: @@ -330,6 +345,7 @@ def update_conda_sdk_client_yml( # continue # TODO what is the case where we batch multiple subservices under one??? + release_name = f"release_{package_name.replace('-', '_')}" new_parameter = { "name": release_name, @@ -424,26 +440,23 @@ def get_package_path(package_name: str) -> str: return matches[0] -def determine_service_info(package_name: str) -> Tuple[str, str]: +def determine_service_info(pkg: Dict[str, str]) -> Tuple[str, str]: # TODO how to actually determine?, this is mostly placeholder """ Dynamically determine the common_root and service name based on package name and directory structure. :param package_name: The name of the package (e.g., "azure-ai-textanalytics"). """ - # TODO not all existing packages follow this pattern tho, - # e.g. 
azure-ai-metricsadvisor has service cognitivelanguage <- does this one even exist anymore? - # e.g. azure-ai-translation-text has service translation - # e.g. azure-digitaltwins-core has service digitaltwins - # e.g. azure-monitor-ingestion has service monitor + package_name = pkg.get(PACKAGE_COL, "") + service_name = pkg.get(REPO_PATH_COL, "") - # and some packages don't have a common_root field at all?? + # TODO not all existing packages follow this pattern + # - some packages in the yml don't have a common_root field at all?? + # - communication has common root of azure/communication instead of azure + # - azure-ai-voicelive's service name is currently projects? - # services with a shared common root include - # "azure-ai", "azure-mgmt", "azure-storage", "azure-communication", etc. - - # TODO idk how to properly get the service name, e.g. azure-ai-voicelive is projects? - service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) + if not service_name: + service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) # TODO Determine common_root common_root = "azure" @@ -703,7 +716,10 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] logger.info(f"Added {len(new_packages)} new management plane packages to meta.yaml") return result -def update_release_logs(packages_to_update: List[Dict[str, str]], release_date: str) -> List[str]: + +def update_release_logs( + packages_to_update: List[Dict[str, str]], release_date: str +) -> List[str]: """Add and update release logs for conda packages.""" result = [] @@ -714,35 +730,33 @@ def update_release_logs(packages_to_update: List[Dict[str, str]], release_date: for pkg in packages_to_update: package_name = pkg.get(PACKAGE_COL) version = pkg.get(VERSION_GA_COL) - + if not package_name: logger.warning("Skipping package with missing name") continue - + if not version: logger.warning(f"Skipping {package_name} with missing version") 
result.append(package_name) continue - release_log_path = os.path.join( - CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" - ) + release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{package_name}.md") if not os.path.exists(release_log_path): # Add new release log logger.info(f"Creating new release log for: {package_name}") - + try: title_parts = package_name.replace("azure-", "").split("-") title = " ".join(word.title() for word in title_parts) - + content = f"# Azure {title} client library for Python (conda)\n\n" content += f"## {release_date}\n\n" content += "### Packages included\n\n" # TODO what about when there's multiple packages...e.g. azure-schemaregistry content += f"- {package_name}-{version}\n" - + with open(release_log_path, "w") as f: f.write(content) logger.info(f"Created new release log for {package_name}") @@ -755,35 +769,40 @@ def update_release_logs(packages_to_update: List[Dict[str, str]], release_date: try: with open(release_log_path, "r") as f: existing_content = f.read() - + lines = existing_content.split("\n") - + new_release = f"\n## {release_date}\n\n" new_release += "### Packages included\n\n" new_release += f"- {package_name}-{version}\n" - + lines.insert(1, new_release) updated_content = "\n".join(lines) - + with open(release_log_path, "w") as f: f.write(updated_content) - + logger.info(f"Updated release log for {package_name}") except Exception as e: logger.error(f"Failed to update release log for {package_name}: {e}") result.append(package_name) - # TODO AKA link pointing to new release logs needs to happen - + # TODO AKA link pointing to new release logs needs to happen + return result + if __name__ == "__main__": parser = argparse.ArgumentParser( description="Update conda package files and versions for release." 
) + parser.add_argument( + "--verbose", + action="store_true", + help="Enable debug logging", + ) args = parser.parse_args() - args.debug = True # TODO remove this configure_logging(args) old_date, new_version = update_conda_version() @@ -826,7 +845,9 @@ def update_release_logs(packages_to_update: List[Dict[str, str]], release_date: new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) # add/update release logs - release_log_results = update_release_logs(outdated_packages + new_packages, new_version) + release_log_results = update_release_logs( + outdated_packages + new_packages, new_version + ) print("=== REPORT ===") From 71e99ccaf2a3a4cb734b0dba6f1e5f3a7b82a685 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 11:49:26 -0800 Subject: [PATCH 029/112] initial mappings --- conda/conda_release_groups.py | 125 ++++++++++++++++++++++++++++++++++ conda/update_conda_files.py | 86 +++++++++++------------ 2 files changed, 169 insertions(+), 42 deletions(-) create mode 100644 conda/conda_release_groups.py diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py new file mode 100644 index 000000000000..4fac6107f892 --- /dev/null +++ b/conda/conda_release_groups.py @@ -0,0 +1,125 @@ +""" +Mapping of Azure SDK package names to their release group, used for Conda +release file updates in update_conda_files.py + +New grouped packages should be registered before using the script +to update for Conda releases. + +Packages that are not listed here are treated as standalone packages, +each forming their own release group (excluding mgmt packages, which will +by default be grouped). + +Packages that are grouped together will: + 1. Share a single release log file (e.g., azure-communication.md for all communication packages) + 2. Be listed under one CondaArtifact entry in conda-sdk-client.yml + 3. 
Be released together under a single release parameter +""" + +RELEASE_GROUPS = { + # Core + "azure-core": { + "packages": ["azure-core", "azure-mgmt-core", "azure-common"], + "common_root": "azure", + "service": "core", + }, + + # Communication + "azure-communication": { + "packages": [ + "azure-communication-chat", + "azure-communication-email", + "azure-communication-identity", + "azure-communication-phonenumbers", + "azure-communication-sms", + "azure-communication-callautomation", + "azure-communication-rooms", + "azure-communication-jobrouter", + "azure-communication-messages", + ], + "common_root": "azure/communication", + "service": "communication", + }, + + # Storage + "azure-storage": { + "packages": [ + "azure-storage-blob", + "azure-storage-queue", + "azure-storage-file-share", + "azure-storage-file-datalake", + ], + "common_root": "azure/storage", + "service": "storage", + }, + + # Schema Registry + "azure-schemaregistry": { + "packages": [ + "azure-schemaregistry", + "azure-schemaregistry-avroencoder", + ], + "common_root": "azure/schemaregistry", + "service": "schemaregistry", + }, + + # Event Hub + "azure-eventhub": { + "packages": [ + "azure-eventhub", + "azure-eventhub-checkpointstoreblob", + "azure-eventhub-checkpointstoreblob-aio", + ], + "common_root": "azure/eventhub", + "service": "eventhub", + }, + + "azure-keyvault": { + "packages": ["azure-keyvault-secrets", "azure-keyvault-keys", "azure-keyvault-certificates"], + "common_root": "azure/keyvault", + "service": "keyvault", + }, + + # Packages with common root exceptions + + "msrest": { + "packages": ["msrest"], + "common_root": None + }, + + "msal": { + "packages": ["msal"], + "common_root": None + }, + + "msal-extensions": { + "packages": ["msal-extensions"], + "common_root": "msal", + }, +} + + +# Reverse mapping: package name -> release group name +def get_package_to_group_mapping(): + mapping = {} + for group_name, group_info in RELEASE_GROUPS.items(): + for package in 
group_info["packages"]: + mapping[package] = group_name + return mapping + +def get_release_group(package_name: str, package_to_group: dict) -> str: + """ + Get the release group name for a given package. + + :param package_name: The package name (e.g., "azure-core", "azure-communication-chat") + :return: The release group name (e.g., "azure-core", "azure-communication") + """ + return package_to_group.get(package_name, package_name) + +def get_package_group_data(group_name: str) -> dict: + """ + Get all packages that belong to a release group. + + :param group_name: The release group name + :return: The group data dictionary + """ + return RELEASE_GROUPS.get(group_name, {}) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 68e164a9a06d..07d02dd813a5 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -720,10 +720,12 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] def update_release_logs( packages_to_update: List[Dict[str, str]], release_date: str ) -> List[str]: - """Add and update release logs for conda packages.""" + """ + Add and update release logs for conda packages. Release log includes all version changes and new packages. 
+ """ result = [] - # update mgmt release log separately + # TODO update mgmt release log separately mgmt_release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") # update data plane release logs @@ -838,43 +840,43 @@ def update_release_logs( outdated_packages, new_data_plane_packages, new_mgmt_plane_packages ) - # handle new data plane libraries - new_data_plane_results = add_new_data_plane_packages(new_data_plane_packages) - - # handle new mgmt plane libraries - new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) - - # add/update release logs - release_log_results = update_release_logs( - outdated_packages + new_packages, new_version - ) - - print("=== REPORT ===") - - if conda_sdk_client_pkgs_result: - print( - "The following packages may require manual adjustments in conda-sdk-client.yml:" - ) - for pkg_name in conda_sdk_client_pkgs_result: - print(f"- {pkg_name}") - - if new_data_plane_results: - print( - "\nThe following new data plane packages may require manual meta.yaml creation or adjustments:" - ) - for pkg_name in new_data_plane_results: - print(f"- {pkg_name}") - - if new_mgmt_plane_results: - print( - "\nThe following new management plane packages may require manual adjustments in azure-mgmt/meta.yaml:" - ) - for pkg_name in new_mgmt_plane_results: - print(f"- {pkg_name}") - - if release_log_results: - print( - "\nThe following packages may require manual adjustments in release logs:" - ) - for pkg_name in release_log_results: - print(f"- {pkg_name}") + # # handle new data plane libraries + # new_data_plane_results = add_new_data_plane_packages(new_data_plane_packages) + + # # handle new mgmt plane libraries + # new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) + + # # add/update release logs + # release_log_results = update_release_logs( + # outdated_packages + new_packages, new_version + # ) + + # print("=== REPORT ===") + + # if conda_sdk_client_pkgs_result: + # print( + # "The 
following packages may require manual adjustments in conda-sdk-client.yml:" + # ) + # for pkg_name in conda_sdk_client_pkgs_result: + # print(f"- {pkg_name}") + + # if new_data_plane_results: + # print( + # "\nThe following new data plane packages may require manual meta.yaml creation or adjustments:" + # ) + # for pkg_name in new_data_plane_results: + # print(f"- {pkg_name}") + + # if new_mgmt_plane_results: + # print( + # "\nThe following new management plane packages may require manual adjustments in azure-mgmt/meta.yaml:" + # ) + # for pkg_name in new_mgmt_plane_results: + # print(f"- {pkg_name}") + + # if release_log_results: + # print( + # "\nThe following packages may require manual adjustments in release logs:" + # ) + # for pkg_name in release_log_results: + # print(f"- {pkg_name}") From e5f45a906b1ca6420d04374768e645e438c5859b Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 11:55:06 -0800 Subject: [PATCH 030/112] minor --- conda/conda_release_groups.py | 44 ++++++++++++++++------------------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index 4fac6107f892..9c76d60267a7 100644 --- a/conda/conda_release_groups.py +++ b/conda/conda_release_groups.py @@ -22,8 +22,7 @@ "common_root": "azure", "service": "core", }, - - # Communication + # Communication "azure-communication": { "packages": [ "azure-communication-chat", @@ -39,7 +38,6 @@ "common_root": "azure/communication", "service": "communication", }, - # Storage "azure-storage": { "packages": [ @@ -51,8 +49,7 @@ "common_root": "azure/storage", "service": "storage", }, - - # Schema Registry + # Schema Registry "azure-schemaregistry": { "packages": [ "azure-schemaregistry", @@ -61,7 +58,6 @@ "common_root": "azure/schemaregistry", "service": "schemaregistry", }, - # Event Hub "azure-eventhub": { "packages": [ @@ -72,29 +68,27 @@ "common_root": "azure/eventhub", "service": 
"eventhub", }, - "azure-keyvault": { - "packages": ["azure-keyvault-secrets", "azure-keyvault-keys", "azure-keyvault-certificates"], + "packages": [ + "azure-keyvault-administration", + "azure-keyvault-secrets", + "azure-keyvault-keys", + "azure-keyvault-certificates", + ], "common_root": "azure/keyvault", "service": "keyvault", }, - - # Packages with common root exceptions - - "msrest": { - "packages": ["msrest"], - "common_root": None - }, - - "msal": { - "packages": ["msal"], - "common_root": None - }, - + # Packages with common root exceptions + "msrest": {"packages": ["msrest"], "common_root": None}, + "msal": {"packages": ["msal"], "common_root": None}, "msal-extensions": { "packages": ["msal-extensions"], "common_root": "msal", }, + "azure-ai-vision": { + "packages": ["azure-ai-vision-imageanalysis"], + "common_root": "azure/vision", + }, } @@ -106,20 +100,22 @@ def get_package_to_group_mapping(): mapping[package] = group_name return mapping + def get_release_group(package_name: str, package_to_group: dict) -> str: """ Get the release group name for a given package. - + :param package_name: The package name (e.g., "azure-core", "azure-communication-chat") :return: The release group name (e.g., "azure-core", "azure-communication") """ return package_to_group.get(package_name, package_name) + def get_package_group_data(group_name: str) -> dict: """ Get all packages that belong to a release group. 
- + :param group_name: The release group name - :return: The group data dictionary + :return: The group data dictionary """ return RELEASE_GROUPS.get(group_name, {}) From a5b6ee4f7fe30d34dcea3103a31b59e4dd06f977 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 14:26:10 -0800 Subject: [PATCH 031/112] refactor progress --- conda/update_conda_files.py | 139 +++++++++++++++++++++--------------- 1 file changed, 82 insertions(+), 57 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 07d02dd813a5..353a746f4cda 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -14,6 +14,8 @@ from ci_tools.parsing import ParsedSetup, extract_package_metadata from typing import Dict, List, Optional, Tuple +from conda_release_groups import get_package_group_data, get_release_group, get_package_to_group_mapping + # paths ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) SDK_DIR = os.path.join(ROOT_DIR, "sdk") @@ -228,16 +230,18 @@ def increase_indent(self, flow=False, indentless=False): def update_conda_sdk_client_yml( - packages_to_update: List[Dict[str, str]], - new_data_plane_packages: List[Dict[str, str]], - new_mgmt_plane_packages: List[Dict[str, str]], + package_dict: Dict[str, Dict[str, str]], + packages_to_update: List[str], + new_data_plane_packages: List[str], + new_mgmt_plane_packages: List[str], ) -> List[str]: """ Update outdated package versions and add new entries in conda-sdk-client.yml file - :param packages_to_update: List of package rows from the CSV that need updates. - :param new_data_plane_packages: List of new data plane package rows from the CSV. - :param new_mgmt_plane_packages: List of new management plane package rows from the CSV. + :param package_dict: Dictionary mapping package names to their CSV row data. + :param packages_to_update: List of package names that need version updates. 
+ :param new_data_plane_packages: List of new data plane package names. + :param new_mgmt_plane_packages: List of new management plane package names. :return: List of package names that were not updated or added and may require manual action. """ updated_count = 0 @@ -251,15 +255,15 @@ def update_conda_sdk_client_yml( 0 ]["steps"][0]["parameters"]["CondaArtifacts"] - # update outdated package versions + # === Update outdated package versions === logger.info( f"Detected {len(packages_to_update)} outdated package versions to update in conda-sdk-client.yml" ) package_index = build_package_index(conda_artifacts) - for pkg in packages_to_update: - pkg_name = pkg.get(PACKAGE_COL) + for pkg_name in packages_to_update: + pkg = package_dict.get(pkg_name, {}) new_version = pkg.get(VERSION_GA_COL) if pkg_name in package_index: artifact_idx, checkout_idx = package_index[pkg_name] @@ -314,27 +318,18 @@ def update_conda_sdk_client_yml( ) result.append(pkg_name) - # Add new data plane packages + # === Add new data plane packages === logger.info( f"Detected {len(new_data_plane_packages)} new data plane packages to add to conda-sdk-client.yml" ) - # TODO when do we batch packages together that have the same root repoPath?? - # e.g. 'core' encompasses azure-core and azure-common: - # there's only 1 parameter release_azure_core, and those packages are grouped under the same checkout - - # however, 'ai' packages have multiple params - # e.g. 
azure-ai-agents and azure-ai-projects are separated - + package_to_group = get_package_to_group_mapping() parameters = conda_client_data["parameters"] + existing_parameter_names = [p.get("name") for p in parameters] - for pkg in new_data_plane_packages: - package_name = pkg.get(PACKAGE_COL) - - if not package_name: - logger.warning("Skipping package with missing name") - continue + for package_name in new_data_plane_packages: + pkg = package_dict.get(package_name, {}) # TODO commented out for testing purposes only # if package_name in package_index: @@ -344,27 +339,50 @@ def update_conda_sdk_client_yml( # result.append(package_name) # continue - # TODO what is the case where we batch multiple subservices under one??? - - release_name = f"release_{package_name.replace('-', '_')}" - new_parameter = { - "name": release_name, - "displayName": package_name, - "type": "boolean", - "default": True, - } + # check if package belongs to a release group + group_name = get_release_group(package_name, package_to_group) + group_data = get_package_group_data(group_name) - parameters.append(new_parameter) - - # also add to CondaArtifacts - common_root, service_name = determine_service_info(package_name) + if group_data: + logger.info( + f"Package {package_name} belongs to release group {group_name}" + ) + release_name = f"release_{group_name.replace('-', '_')}" + display_name = group_name + else: + # package is released individually + release_name = f"release_{package_name.replace('-', '_')}" + display_name = package_name + + # add new release parameter if not exists + if release_name not in existing_parameter_names: + new_parameter = { + "name": release_name, + "displayName": display_name, + "type": "boolean", + "default": True, + } + parameters.append(new_parameter) + + # add to CondaArtifacts + common_root, service_name = determine_service_info(pkg, package_to_group) + + # build checkout packages + if group_data: + checkout_packages = [] + for grouped_pkg in 
group_data["packages"]: + checkout_packages.append( + {"package": grouped_pkg, "version": pkg.get(VERSION_GA_COL)} + ) + else: + checkout_packages = [{"package": package_name, "version": pkg.get(VERSION_GA_COL)}] new_artifact_entry = { "name": package_name, "common_root": common_root, "service": service_name, "in_batch": f"${{{{ parameters.{release_name} }}}}", - "checkout": [{"package": package_name, "version": pkg.get(VERSION_GA_COL)}], + "checkout": checkout_packages, } # append before azure-mgmt entry @@ -382,12 +400,8 @@ def update_conda_sdk_client_yml( # assumes azure-mgmt will always be the last CondaArtifacts entry azure_mgmt_artifact_checkout = conda_artifacts[-1]["checkout"] - for pkg in new_mgmt_plane_packages: - package_name = pkg.get(PACKAGE_COL) - - if not package_name: - logger.warning("Skipping package with missing name") - continue + for package_name in new_mgmt_plane_packages: + pkg = package_dict.get(package_name, {}) # TODO commented out for testing purposes only # if package_name in package_index: @@ -440,27 +454,31 @@ def get_package_path(package_name: str) -> str: return matches[0] -def determine_service_info(pkg: Dict[str, str]) -> Tuple[str, str]: - # TODO how to actually determine?, this is mostly placeholder +def determine_service_info(pkg: Dict[str, str], package_to_group: dict) -> Tuple[str, str]: """ - Dynamically determine the common_root and service name based on package name and directory structure. + Returns the common root and service name for the given package. :param package_name: The name of the package (e.g., "azure-ai-textanalytics"). + :param package_to_group: Mapping of package names to release group names. """ package_name = pkg.get(PACKAGE_COL, "") - service_name = pkg.get(REPO_PATH_COL, "") - - # TODO not all existing packages follow this pattern - # - some packages in the yml don't have a common_root field at all?? 
- # - communication has common root of azure/communication instead of azure - # - azure-ai-voicelive's service name is currently projects? + service_name = pkg.get(REPO_PATH_COL, "").lower() if not service_name: service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) - # TODO Determine common_root common_root = "azure" + # check for exceptions to the pattern + group_name = get_release_group(package_name, package_to_group) + group_data = get_package_group_data(group_name) + + if group_data: + if group_data.get("service"): + service_name = group_data["service"] + if group_data.get("common_root"): + common_root = group_data["common_root"] + return common_root, service_name @@ -819,8 +837,8 @@ def update_release_logs( logger.error("No packages found in CSV data.") exit(1) - # Only ship GA packages - packages = [pkg for pkg in packages if pkg.get(VERSION_GA_COL)] + # Only ship GA packages that are not deprecated + packages = [pkg for pkg in packages if (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL))] logger.info(f"Filtered to {len(packages)} GA packages") outdated_packages = [ @@ -833,11 +851,18 @@ def update_release_logs( new_packages ) + # Extract package names from the filtered lists + outdated_package_names = [pkg.get(PACKAGE_COL, "") for pkg in outdated_packages if pkg.get(PACKAGE_COL)] + new_data_plane_names = [pkg.get(PACKAGE_COL, "") for pkg in new_data_plane_packages if pkg.get(PACKAGE_COL)] + new_mgmt_plane_names = [pkg.get(PACKAGE_COL, "") for pkg in new_mgmt_plane_packages if pkg.get(PACKAGE_COL)] + + # map package name to csv row for easy lookup + package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} + # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
- conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( - outdated_packages, new_data_plane_packages, new_mgmt_plane_packages + package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names ) # # handle new data plane libraries From 4bd6c7dde49a4ae75ebd3f9aca81620a19f0c54b Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 14:57:40 -0800 Subject: [PATCH 032/112] conda sdk client refactor almost complete --- conda/conda_release_groups.py | 2 +- conda/update_conda_files.py | 58 +++++++++++++++++++++++++---------- 2 files changed, 42 insertions(+), 18 deletions(-) diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index 9c76d60267a7..f0db324fc0a4 100644 --- a/conda/conda_release_groups.py +++ b/conda/conda_release_groups.py @@ -116,6 +116,6 @@ def get_package_group_data(group_name: str) -> dict: Get all packages that belong to a release group. :param group_name: The release group name - :return: The group data dictionary + :return: The group data dictionary, or empty dict if not found """ return RELEASE_GROUPS.get(group_name, {}) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 353a746f4cda..90621af97a0d 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -326,7 +326,10 @@ def update_conda_sdk_client_yml( package_to_group = get_package_to_group_mapping() parameters = conda_client_data["parameters"] + + # quick look up for handling grouped package releases existing_parameter_names = [p.get("name") for p in parameters] + existing_artifact_names = [a.get("name") for a in conda_artifacts] for package_name in new_data_plane_packages: pkg = package_dict.get(package_name, {}) @@ -344,6 +347,7 @@ def update_conda_sdk_client_yml( group_data = get_package_group_data(group_name) if group_data: + # package is part of a release group logger.info( f"Package {package_name} belongs to release group {group_name}" ) @@ -366,30 
+370,50 @@ def update_conda_sdk_client_yml( # add to CondaArtifacts common_root, service_name = determine_service_info(pkg, package_to_group) - + + # TODO check this logic with a dummy new package group # build checkout packages if group_data: checkout_packages = [] - for grouped_pkg in group_data["packages"]: - checkout_packages.append( - {"package": grouped_pkg, "version": pkg.get(VERSION_GA_COL)} - ) + for grouped_pkg_name in group_data["packages"]: + curr_pkg = package_dict.get(grouped_pkg_name, {}) + if not curr_pkg: + logger.error( + f"Package {grouped_pkg_name} listed in group {group_name} not found in CSV data, skipping" + ) + result.append(grouped_pkg_name) + continue + curr_version = curr_pkg.get(VERSION_GA_COL) + if curr_version: + checkout_packages.append( + {"package": grouped_pkg_name, "version": curr_version} + ) + else: + logger.error( + f"Package {grouped_pkg_name} in group {group_name} is missing version info, skipping" + ) + result.append(grouped_pkg_name) else: checkout_packages = [{"package": package_name, "version": pkg.get(VERSION_GA_COL)}] - new_artifact_entry = { - "name": package_name, - "common_root": common_root, - "service": service_name, - "in_batch": f"${{{{ parameters.{release_name} }}}}", - "checkout": checkout_packages, - } + if group_name not in existing_artifact_names: + new_artifact_entry = { + "name": group_name if group_data else package_name, + "common_root": common_root, + "service": service_name, + "in_batch": f"${{{{ parameters.{release_name} }}}}", + "checkout": checkout_packages, + } - # append before azure-mgmt entry - conda_artifacts.insert(len(conda_artifacts) - 1, new_artifact_entry) + # append before azure-mgmt entry + conda_artifacts.insert(len(conda_artifacts) - 1, new_artifact_entry) - added_count += 1 - logger.info(f"Added new data plane package: {package_name}") + added_count += 1 + logger.info(f"Added new data plane package: {package_name}") + else: + logger.info( + f"CondaArtifact for {group_name if 
group_data else package_name} already exists in conda-sdk-client.yml, skipping addition" + ) # Add new mgmt plane packages @@ -426,7 +450,7 @@ def update_conda_sdk_client_yml( # TODO note this dump doesn't preserve some quotes like around # displayName: 'azure-developer-loadtesting' but i don't think those functionally necessary? - # double check that this is ok, esp for URLs... ^ + # double check that this is ok, esp for URLs... ^ if updated_count > 0 or added_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: From 0426e10fc1aa0c001d15a826c406977826369c56 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 15:26:31 -0800 Subject: [PATCH 033/112] this here seems to work for new batched packages --- conda/update_conda_files.py | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 90621af97a0d..932812bb561a 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -329,7 +329,7 @@ def update_conda_sdk_client_yml( # quick look up for handling grouped package releases existing_parameter_names = [p.get("name") for p in parameters] - existing_artifact_names = [a.get("name") for a in conda_artifacts] + existing_artifact_names = {a.get("name"): idx for idx, a in enumerate(conda_artifacts)} for package_name in new_data_plane_packages: pkg = package_dict.get(package_name, {}) @@ -371,7 +371,6 @@ def update_conda_sdk_client_yml( # add to CondaArtifacts common_root, service_name = determine_service_info(pkg, package_to_group) - # TODO check this logic with a dummy new package group # build checkout packages if group_data: checkout_packages = [] @@ -412,8 +411,19 @@ def update_conda_sdk_client_yml( logger.info(f"Added new data plane package: {package_name}") else: logger.info( - f"CondaArtifact for {group_name if group_data else package_name} already exists in conda-sdk-client.yml, skipping 
addition" + f"CondaArtifact for {group_name if group_data else package_name} already exists in conda-sdk-client.yml" ) + curr_artifact_checkout = conda_artifacts[existing_artifact_names[group_name]]["checkout"] + packages_in_artifact = {item["package"] for item in curr_artifact_checkout} + + # account for adding a single new package to an existing group + for pkg_entry in checkout_packages: + if pkg_entry["package"] not in packages_in_artifact: + curr_artifact_checkout.append(pkg_entry) + added_count += 1 + logger.info( + f"Added package {pkg_entry['package']} to existing CondaArtifact {group_name}" + ) # Add new mgmt plane packages @@ -485,12 +495,9 @@ def determine_service_info(pkg: Dict[str, str], package_to_group: dict) -> Tuple :param package_name: The name of the package (e.g., "azure-ai-textanalytics"). :param package_to_group: Mapping of package names to release group names. """ + # defaults package_name = pkg.get(PACKAGE_COL, "") service_name = pkg.get(REPO_PATH_COL, "").lower() - - if not service_name: - service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) - common_root = "azure" # check for exceptions to the pattern @@ -503,6 +510,9 @@ def determine_service_info(pkg: Dict[str, str], package_to_group: dict) -> Tuple if group_data.get("common_root"): common_root = group_data["common_root"] + if not service_name: + service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) + return common_root, service_name @@ -883,6 +893,11 @@ def update_release_logs( # map package name to csv row for easy lookup package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} + # TEST + new_data_plane_names = ["test2"] + package_dict["test2"] = {"Package": "test2", "VersionGA": "1.0.0", "LatestGADate": "1/1/2026"} + package_dict["test1"] = {"Package": "test1", "VersionGA": "3.0.0", "LatestGADate": "1/1/2026"} + # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to 
the last release... conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( From 5cf5d43b188392449bed533bb8d75af246fbecc7 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 17:03:07 -0800 Subject: [PATCH 034/112] some progress in dataplane yml --- conda/update_conda_files.py | 127 ++++++++++++++++++++---------------- 1 file changed, 70 insertions(+), 57 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 932812bb561a..d576d4c03e45 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -416,7 +416,7 @@ def update_conda_sdk_client_yml( curr_artifact_checkout = conda_artifacts[existing_artifact_names[group_name]]["checkout"] packages_in_artifact = {item["package"] for item in curr_artifact_checkout} - # account for adding a single new package to an existing group + # account for adding new packages to an existing group for pkg_entry in checkout_packages: if pkg_entry["package"] not in packages_in_artifact: curr_artifact_checkout.append(pkg_entry) @@ -437,13 +437,12 @@ def update_conda_sdk_client_yml( for package_name in new_mgmt_plane_packages: pkg = package_dict.get(package_name, {}) - # TODO commented out for testing purposes only - # if package_name in package_index: - # logger.warning( - # f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" - # ) - # result.append(package_name) - # continue + if package_name in package_index: + logger.warning( + f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" + ) + result.append(package_name) + continue new_mgmt_entry = { "package": package_name, @@ -562,55 +561,71 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] return list(host_requirements), list(run_requirements) - -def get_package_metadata(package_name: str, package_path: str) -> Dict[str, str]: - """Extract metadata for the about section from 
package.""" - metadata = extract_package_metadata(package_path) +def get_package_metadata(package_name: str, package_path: str) -> Tuple[str, str, str]: + """Extract package metadata for about section in meta.yaml.""" + pkg_metadata = extract_package_metadata(package_path) service_dir = os.path.basename(os.path.dirname(package_path)) - home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" - # TODO check this - if metadata and metadata.get("description"): - summary = metadata["description"] + # TODO check correctness of this + if pkg_metadata and pkg_metadata.get("description"): + summary = pkg_metadata["description"] else: summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" - + # TODO definitely need to check if this is actually correct conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" + description = f"This is the {summary}.\n Please see {conda_url} for version details." - return { - "home": home_url, - "summary": summary, - "description": f"This is the {summary}.\n Please see {conda_url} for version details.", - } - + return home_url, summary, description + def generate_data_plane_meta_yaml( - package_name: str, download_uri: Optional[str] = None -) -> str: - """Generate the meta.yaml content for a data plane package.""" + package_dict: Dict[str, Dict[str, str]], package_name: str, group_name: Optional[str], group_data: Optional[dict]) -> str: + """ + Generate the meta.yaml content for a data plane package or release group. + + :param package_dict: Dictionary mapping package names to their CSV row data. + :param package_name: The name of the package to generate meta.yaml for. + :param group: Whether the meta.yaml is for a single package or group. + """ # TODO is it correct that the env var name is arbitrary and replaced in conda_functions.py? 
src_distr_name = package_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" - package_path = get_package_path(package_name) + # handle grouped packages + if group_name and group_data: + host_reqs = set() + run_reqs = set() + pkg_imports = [] + for pkg in group_data["packages"]: + package_path = get_package_path(pkg) + parsed_setup = ParsedSetup.from_path(package_path) + pkg_host_reqs, pkg_run_reqs = get_package_requirements(parsed_setup) + host_reqs.update(pkg_host_reqs) + run_reqs.update(pkg_run_reqs) + pkg_imports.append(pkg.replace("-", ".")) + host_reqs = list(host_reqs) + run_reqs = list(run_reqs) + + # TODO verify correctness + home_url, summary, description = get_package_metadata(group_name, get_package_path(group_data["packages"][0])) + else: + package_path = get_package_path(package_name) + parsed_setup = ParsedSetup.from_path(package_path) - parsed_setup = ParsedSetup.from_path(package_path) - host_reqs, run_reqs = get_package_requirements(parsed_setup) + host_reqs, run_reqs = get_package_requirements(parsed_setup) + pkg_imports = [package_name.replace("-", ".")] + # extract metadata for about section + home_url, summary, description = get_package_metadata(package_name, package_path) + # Format requirements with proper YAML indentation host_reqs_str = "\n - ".join(host_reqs) run_reqs_str = "\n - ".join(run_reqs) - - # TODO there can be subdirectory packages..... e.g. 
azure-ai-ml, may need more import logic - pkg_name_normalized = package_name.replace("-", ".") - - # Get package metadata for about section - metadata = get_package_metadata(package_name, package_path) - + pkg_imports_str = "\n - ".join(pkg_imports) meta_yaml_content = f"""{{% set name = "{package_name}" %}} package: @@ -633,16 +648,16 @@ def generate_data_plane_meta_yaml( test: imports: - - {pkg_name_normalized} + - {pkg_imports_str} about: - home: "{metadata['home']}" + home: "{home_url}" license: MIT license_family: MIT license_file: - summary: "{metadata['summary']}" + summary: "{summary}" description: | - {metadata['description']} + {description} doc_url: dev_url: @@ -653,27 +668,30 @@ def generate_data_plane_meta_yaml( return meta_yaml_content -def add_new_data_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: +def add_new_data_plane_packages(package_dict: Dict[str, Dict[str, str]], new_data_plane_names: List[str]) -> List[str]: """Create meta.yaml files for new data plane packages and add import tests.""" - if len(new_packages) == 0: + if len(new_data_plane_names) == 0: return [] - logger.info(f"Adding {len(new_packages)} new data plane packages") + logger.info(f"Adding {len(new_data_plane_names)} new data plane packages") result = [] - - for pkg in new_packages: - package_name = pkg.get(PACKAGE_COL) - if not package_name: - logger.warning("Skipping package with missing name") - continue - + + group_names_processed = set() + for package_name in new_data_plane_names: logger.info(f"Adding new data plane meta.yaml for: {package_name}") pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) + group_name = get_release_group(package_name, get_package_to_group_mapping()) + group_data = get_package_group_data(group_name) + + if group_data and group_name in group_names_processed: + logger.info(f"Meta.yaml for group {group_name} already created, skipping {package_name}") + 
continue + try: - meta_yml = generate_data_plane_meta_yaml(package_name) + meta_yml = generate_data_plane_meta_yaml(package_dict,package_name, group_name, group_data) except Exception as e: logger.error( f"Failed to generate meta.yaml content for {package_name} and skipping, error: {e}" @@ -893,11 +911,6 @@ def update_release_logs( # map package name to csv row for easy lookup package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} - # TEST - new_data_plane_names = ["test2"] - package_dict["test2"] = {"Package": "test2", "VersionGA": "1.0.0", "LatestGADate": "1/1/2026"} - package_dict["test1"] = {"Package": "test1", "VersionGA": "3.0.0", "LatestGADate": "1/1/2026"} - # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( @@ -905,7 +918,7 @@ def update_release_logs( ) # # handle new data plane libraries - # new_data_plane_results = add_new_data_plane_packages(new_data_plane_packages) + new_data_plane_results = add_new_data_plane_packages(package_dict, new_data_plane_names) # # handle new mgmt plane libraries # new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) From 5a0fcf76d8842ac09207a2028837f45169c374e0 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 22:59:47 -0800 Subject: [PATCH 035/112] basic data plane yaml working for grouped? 
--- conda/update_conda_files.py | 140 ++++++++++++++++++++++++------------ 1 file changed, 96 insertions(+), 44 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index d576d4c03e45..58e02635f578 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -14,7 +14,11 @@ from ci_tools.parsing import ParsedSetup, extract_package_metadata from typing import Dict, List, Optional, Tuple -from conda_release_groups import get_package_group_data, get_release_group, get_package_to_group_mapping +from conda_release_groups import ( + get_package_group_data, + get_release_group, + get_package_to_group_mapping, +) # paths ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) @@ -329,7 +333,9 @@ def update_conda_sdk_client_yml( # quick look up for handling grouped package releases existing_parameter_names = [p.get("name") for p in parameters] - existing_artifact_names = {a.get("name"): idx for idx, a in enumerate(conda_artifacts)} + existing_artifact_names = { + a.get("name"): idx for idx, a in enumerate(conda_artifacts) + } for package_name in new_data_plane_packages: pkg = package_dict.get(package_name, {}) @@ -348,16 +354,14 @@ def update_conda_sdk_client_yml( if group_data: # package is part of a release group - logger.info( - f"Package {package_name} belongs to release group {group_name}" - ) + logger.info(f"Package {package_name} belongs to release group {group_name}") release_name = f"release_{group_name.replace('-', '_')}" display_name = group_name else: # package is released individually release_name = f"release_{package_name.replace('-', '_')}" display_name = package_name - + # add new release parameter if not exists if release_name not in existing_parameter_names: new_parameter = { @@ -370,8 +374,8 @@ def update_conda_sdk_client_yml( # add to CondaArtifacts common_root, service_name = determine_service_info(pkg, package_to_group) - - # build checkout packages + + # build checkout packages if 
group_data: checkout_packages = [] for grouped_pkg_name in group_data["packages"]: @@ -393,7 +397,9 @@ def update_conda_sdk_client_yml( ) result.append(grouped_pkg_name) else: - checkout_packages = [{"package": package_name, "version": pkg.get(VERSION_GA_COL)}] + checkout_packages = [ + {"package": package_name, "version": pkg.get(VERSION_GA_COL)} + ] if group_name not in existing_artifact_names: new_artifact_entry = { @@ -413,7 +419,9 @@ def update_conda_sdk_client_yml( logger.info( f"CondaArtifact for {group_name if group_data else package_name} already exists in conda-sdk-client.yml" ) - curr_artifact_checkout = conda_artifacts[existing_artifact_names[group_name]]["checkout"] + curr_artifact_checkout = conda_artifacts[ + existing_artifact_names[group_name] + ]["checkout"] packages_in_artifact = {item["package"] for item in curr_artifact_checkout} # account for adding new packages to an existing group @@ -487,7 +495,9 @@ def get_package_path(package_name: str) -> str: return matches[0] -def determine_service_info(pkg: Dict[str, str], package_to_group: dict) -> Tuple[str, str]: +def determine_service_info( + pkg: Dict[str, str], package_to_group: dict +) -> Tuple[str, str]: """ Returns the common root and service name for the given package. 
@@ -561,6 +571,7 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] return list(host_requirements), list(run_requirements) + def get_package_metadata(package_name: str, package_path: str) -> Tuple[str, str, str]: """Extract package metadata for about section in meta.yaml.""" pkg_metadata = extract_package_metadata(package_path) @@ -570,19 +581,25 @@ def get_package_metadata(package_name: str, package_path: str) -> Tuple[str, str # TODO check correctness of this if pkg_metadata and pkg_metadata.get("description"): - summary = pkg_metadata["description"] + summary = pkg_metadata["description"] else: summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" - + # TODO definitely need to check if this is actually correct conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" - description = f"This is the {summary}.\n Please see {conda_url} for version details." + description = ( + f"This is the {summary}.\n Please see {conda_url} for version details." + ) return home_url, summary, description - + def generate_data_plane_meta_yaml( - package_dict: Dict[str, Dict[str, str]], package_name: str, group_name: Optional[str], group_data: Optional[dict]) -> str: + package_dict: Dict[str, Dict[str, str]], + package_name: str, + group_name: Optional[str], + group_data: Optional[dict], +) -> str: """ Generate the meta.yaml content for a data plane package or release group. @@ -595,33 +612,44 @@ def generate_data_plane_meta_yaml( src_distr_name = package_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" - # handle grouped packages + # TODO not sure if this is the best way to get these requirements + # TODO don't think this covers all possible import tests, e.g. azure.eventgrid, azure.eventgrid.aio <- when would I add that? 
if group_name and group_data: + # handle grouped packages + logger.info( + f"Generating meta.yaml for release group {group_name} including packages: {group_data['packages']}" + ) host_reqs = set() run_reqs = set() pkg_imports = [] + for pkg in group_data["packages"]: package_path = get_package_path(pkg) parsed_setup = ParsedSetup.from_path(package_path) + pkg_host_reqs, pkg_run_reqs = get_package_requirements(parsed_setup) host_reqs.update(pkg_host_reqs) run_reqs.update(pkg_run_reqs) + pkg_imports.append(pkg.replace("-", ".")) host_reqs = list(host_reqs) run_reqs = list(run_reqs) - # TODO verify correctness - home_url, summary, description = get_package_metadata(group_name, get_package_path(group_data["packages"][0])) + home_url, summary, description = get_package_metadata( + group_name, get_package_path(group_data["packages"][0]) + ) else: + logger.info(f"Generating meta.yaml for package {package_name}") package_path = get_package_path(package_name) parsed_setup = ParsedSetup.from_path(package_path) host_reqs, run_reqs = get_package_requirements(parsed_setup) pkg_imports = [package_name.replace("-", ".")] - # extract metadata for about section - home_url, summary, description = get_package_metadata(package_name, package_path) - + home_url, summary, description = get_package_metadata( + package_name, package_path + ) + # Format requirements with proper YAML indentation host_reqs_str = "\n - ".join(host_reqs) run_reqs_str = "\n - ".join(run_reqs) @@ -668,14 +696,16 @@ def generate_data_plane_meta_yaml( return meta_yaml_content -def add_new_data_plane_packages(package_dict: Dict[str, Dict[str, str]], new_data_plane_names: List[str]) -> List[str]: +def add_new_data_plane_packages( + package_dict: Dict[str, Dict[str, str]], new_data_plane_names: List[str] +) -> List[str]: """Create meta.yaml files for new data plane packages and add import tests.""" if len(new_data_plane_names) == 0: return [] logger.info(f"Adding {len(new_data_plane_names)} new data plane packages") 
result = [] - + group_names_processed = set() for package_name in new_data_plane_names: logger.info(f"Adding new data plane meta.yaml for: {package_name}") @@ -687,11 +717,17 @@ def add_new_data_plane_packages(package_dict: Dict[str, Dict[str, str]], new_dat group_data = get_package_group_data(group_name) if group_data and group_name in group_names_processed: - logger.info(f"Meta.yaml for group {group_name} already created, skipping {package_name}") + logger.info( + f"Meta.yaml for group {group_name} already created, skipping {package_name}" + ) continue try: - meta_yml = generate_data_plane_meta_yaml(package_dict,package_name, group_name, group_data) + meta_yml = generate_data_plane_meta_yaml( + package_dict, package_name, group_name, group_data + ) + if group_data: + group_names_processed.add(group_name) except Exception as e: logger.error( f"Failed to generate meta.yaml content for {package_name} and skipping, error: {e}" @@ -890,7 +926,11 @@ def update_release_logs( exit(1) # Only ship GA packages that are not deprecated - packages = [pkg for pkg in packages if (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL))] + packages = [ + pkg + for pkg in packages + if (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL)) + ] logger.info(f"Filtered to {len(packages)} GA packages") outdated_packages = [ @@ -904,9 +944,19 @@ def update_release_logs( ) # Extract package names from the filtered lists - outdated_package_names = [pkg.get(PACKAGE_COL, "") for pkg in outdated_packages if pkg.get(PACKAGE_COL)] - new_data_plane_names = [pkg.get(PACKAGE_COL, "") for pkg in new_data_plane_packages if pkg.get(PACKAGE_COL)] - new_mgmt_plane_names = [pkg.get(PACKAGE_COL, "") for pkg in new_mgmt_plane_packages if pkg.get(PACKAGE_COL)] + outdated_package_names = [ + pkg.get(PACKAGE_COL, "") for pkg in outdated_packages if pkg.get(PACKAGE_COL) + ] + new_data_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in new_data_plane_packages + if pkg.get(PACKAGE_COL) + ] + 
new_mgmt_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in new_mgmt_plane_packages + if pkg.get(PACKAGE_COL) + ] # map package name to csv row for easy lookup package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} @@ -918,7 +968,9 @@ def update_release_logs( ) # # handle new data plane libraries - new_data_plane_results = add_new_data_plane_packages(package_dict, new_data_plane_names) + new_data_plane_results = add_new_data_plane_packages( + package_dict, new_data_plane_names + ) # # handle new mgmt plane libraries # new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) @@ -928,21 +980,21 @@ def update_release_logs( # outdated_packages + new_packages, new_version # ) - # print("=== REPORT ===") + print("=== REPORT ===") - # if conda_sdk_client_pkgs_result: - # print( - # "The following packages may require manual adjustments in conda-sdk-client.yml:" - # ) - # for pkg_name in conda_sdk_client_pkgs_result: - # print(f"- {pkg_name}") + if conda_sdk_client_pkgs_result: + print( + "The following packages may require manual adjustments in conda-sdk-client.yml:" + ) + for pkg_name in conda_sdk_client_pkgs_result: + print(f"- {pkg_name}") - # if new_data_plane_results: - # print( - # "\nThe following new data plane packages may require manual meta.yaml creation or adjustments:" - # ) - # for pkg_name in new_data_plane_results: - # print(f"- {pkg_name}") + if new_data_plane_results: + print( + "\nThe following new data plane packages may require manual meta.yaml creation or adjustments:" + ) + for pkg_name in new_data_plane_results: + print(f"- {pkg_name}") # if new_mgmt_plane_results: # print( From 3083629c009868b49db0e9d764d72159879f12db Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 6 Jan 2026 23:50:39 -0800 Subject: [PATCH 036/112] minor clean --- conda/update_conda_files.py | 87 ++++++++++++++++++++++++------------- 1 file changed, 57 insertions(+), 30 deletions(-) diff --git 
a/conda/update_conda_files.py b/conda/update_conda_files.py index 58e02635f578..9c443917886b 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -50,6 +50,10 @@ "msal-extensions", ] +# ===================================== +# Helpers for updating conda_env.yml +# ===================================== + class quoted(str): pass @@ -85,6 +89,11 @@ def update_conda_version() -> Tuple[datetime, str]: return old_date, new_version +# ===================================== +# Utility functions +# ===================================== + + def parse_csv() -> List[Dict[str, str]]: """Download and parse the Azure SDK Python packages CSV file.""" try: @@ -226,6 +235,11 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int return package_index +# ===================================== +# Helpers for updating conda-sdk-client.yml +# ===================================== + + class IndentDumper(yaml.SafeDumper): """Used to preserve indentation levels in conda-sdk-client.yml.""" @@ -340,13 +354,12 @@ def update_conda_sdk_client_yml( for package_name in new_data_plane_packages: pkg = package_dict.get(package_name, {}) - # TODO commented out for testing purposes only - # if package_name in package_index: - # logger.warning( - # f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" - # ) - # result.append(package_name) - # continue + if package_name in package_index: + logger.warning( + f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" + ) + result.append(package_name) + continue # check if package belongs to a release group group_name = get_release_group(package_name, package_to_group) @@ -433,7 +446,7 @@ def update_conda_sdk_client_yml( f"Added package {pkg_entry['package']} to existing CondaArtifact {group_name}" ) - # Add new mgmt plane packages + # === Add new mgmt plane packages === logger.info( f"Detected {len(new_mgmt_plane_packages)} new management 
plane packages to add to conda-sdk-client.yml" @@ -488,6 +501,11 @@ def update_conda_sdk_client_yml( return result +# ===================================== +# Helpers for creating conda-recipes//meta.yaml files +# ===================================== + + def get_package_path(package_name: str) -> str: """Get the filesystem path of an SDK package given its name.""" pattern = os.path.join(SDK_DIR, "**", package_name) @@ -585,7 +603,7 @@ def get_package_metadata(package_name: str, package_path: str) -> Tuple[str, str else: summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" - # TODO definitely need to check if this is actually correct + # TODO definitely need to check if this is actually always correct conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" description = ( f"This is the {summary}.\n Please see {conda_url} for version details." @@ -746,6 +764,11 @@ def add_new_data_plane_packages( return result +# ===================================== +# Helpers for adding new mgmt plane packages to azure-mgmt/meta.yaml +# ===================================== + + def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" if len(new_packages) == 0: @@ -823,6 +846,11 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] return result +# ===================================== +# Helpers for updating release logs +# ===================================== + + def update_release_logs( packages_to_update: List[Dict[str, str]], release_date: str ) -> List[str]: @@ -862,7 +890,6 @@ def update_release_logs( content += f"## {release_date}\n\n" content += "### Packages included\n\n" - # TODO what about when there's multiple packages...e.g. 
azure-schemaregistry content += f"- {package_name}-{version}\n" with open(release_log_path, "w") as f: @@ -967,18 +994,18 @@ def update_release_logs( package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names ) - # # handle new data plane libraries + # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( package_dict, new_data_plane_names ) - # # handle new mgmt plane libraries - # new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) + # handle new mgmt plane libraries + new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) - # # add/update release logs - # release_log_results = update_release_logs( - # outdated_packages + new_packages, new_version - # ) + # add/update release logs + release_log_results = update_release_logs( + outdated_packages + new_packages, new_version + ) print("=== REPORT ===") @@ -996,16 +1023,16 @@ def update_release_logs( for pkg_name in new_data_plane_results: print(f"- {pkg_name}") - # if new_mgmt_plane_results: - # print( - # "\nThe following new management plane packages may require manual adjustments in azure-mgmt/meta.yaml:" - # ) - # for pkg_name in new_mgmt_plane_results: - # print(f"- {pkg_name}") - - # if release_log_results: - # print( - # "\nThe following packages may require manual adjustments in release logs:" - # ) - # for pkg_name in release_log_results: - # print(f"- {pkg_name}") + if new_mgmt_plane_results: + print( + "\nThe following new management plane packages may require manual adjustments in azure-mgmt/meta.yaml:" + ) + for pkg_name in new_mgmt_plane_results: + print(f"- {pkg_name}") + + if release_log_results: + print( + "\nThe following packages may require manual adjustments in release logs:" + ) + for pkg_name in release_log_results: + print(f"- {pkg_name}") From a72eca50f98fb43c039073eb128b0c4b9adbf651 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 
00:29:12 -0800 Subject: [PATCH 037/112] minor --- conda/update_conda_files.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 9c443917886b..3a1a963099b1 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -850,33 +850,38 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] # Helpers for updating release logs # ===================================== - def update_release_logs( - packages_to_update: List[Dict[str, str]], release_date: str + package_dict: Dict, new_data_plane_names: List[str], new_mgmt_plane_names: List[str], release_date: str ) -> List[str]: """ - Add and update release logs for conda packages. Release log includes all version changes and new packages. + Add and update release logs for conda packages. Release log includes versions of all packages for the release """ result = [] # TODO update mgmt release log separately mgmt_release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") - # update data plane release logs - for pkg in packages_to_update: - package_name = pkg.get(PACKAGE_COL) - version = pkg.get(VERSION_GA_COL) + # TODO update all existing data plane release logs - if not package_name: - logger.warning("Skipping package with missing name") - continue + # TODO update release logs for new packages + for package_name in new_data_plane_names: + pkg = package_dict.get(package_name, {}) + version = pkg.get(VERSION_GA_COL) if not version: logger.warning(f"Skipping {package_name} with missing version") result.append(package_name) continue - release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{package_name}.md") + # check for group + group_name = get_release_group(package_name, get_package_to_group_mapping()) + group_data = get_package_group_data(group_name) + if group_data: + release_log_path = os.path.join( + CONDA_RELEASE_LOGS_DIR, f"{group_name}.md" + ) + 
else: + release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{package_name}.md") if not os.path.exists(release_log_path): # Add new release log @@ -1004,7 +1009,7 @@ def update_release_logs( # add/update release logs release_log_results = update_release_logs( - outdated_packages + new_packages, new_version + package_dict, new_data_plane_names, new_mgmt_plane_names, new_version ) print("=== REPORT ===") From 3b11434a33d71a0ef91076705ee4b3b2b2aea553 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 09:54:40 -0800 Subject: [PATCH 038/112] refactor utility functions --- conda/conda_helper_functions.py | 161 +++++++++++++++++++++++++++++++ conda/update_conda_files.py | 165 +++----------------------------- 2 files changed, 176 insertions(+), 150 deletions(-) create mode 100644 conda/conda_helper_functions.py diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py new file mode 100644 index 000000000000..e38b8248a4c1 --- /dev/null +++ b/conda/conda_helper_functions.py @@ -0,0 +1,161 @@ +""" +Helper functions for updating conda files. 
+""" + +from typing import Dict, List, Optional, Tuple +import csv +import json +from ci_tools.logging import logger +import urllib.request +from datetime import datetime + +AZURE_SDK_CSV_URL = "https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv" +PACKAGE_COL = "Package" +LATEST_GA_DATE_COL = "LatestGADate" +VERSION_GA_COL = "VersionGA" +FIRST_GA_DATE_COL = "FirstGADate" +DISPLAY_NAME_COL = "DisplayName" +SERVICE_NAME_COL = "ServiceName" +REPO_PATH_COL = "RepoPath" +TYPE_COL = "Type" + + +def parse_csv() -> List[Dict[str, str]]: + """Download and parse the Azure SDK Python packages CSV file.""" + try: + logger.info(f"Downloading CSV from {AZURE_SDK_CSV_URL}") + + with urllib.request.urlopen(AZURE_SDK_CSV_URL) as response: + csv_content = response.read().decode("utf-8") + + # Parse the CSV content + csv_reader = csv.DictReader(csv_content.splitlines()) + packages = list(csv_reader) + + logger.info(f"Successfully parsed {len(packages)} packages from CSV") + + return packages + + except Exception as e: + logger.error(f"Failed to download or parse CSV: {e}") + return [] + + +def is_mgmt_package(pkg: Dict[str, str]) -> bool: + pkg_name = pkg.get(PACKAGE_COL, "") + _type = pkg.get(TYPE_COL, "") + if _type == "mgmt": + return True + elif _type == "client": + return False + else: + return pkg_name != "azure-mgmt-core" and ( + "mgmt" in pkg_name or "cognitiveservices" in pkg_name + ) + + +def separate_packages_by_type( + packages: List[Dict[str, str]], +) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + """Separate packages into data plane and management plane libraries.""" + data_plane_packages = [] + mgmt_plane_packages = [] + + for pkg in packages: + if is_mgmt_package(pkg): + mgmt_plane_packages.append(pkg) + else: + data_plane_packages.append(pkg) + + logger.debug( + f"Separated {len(data_plane_packages)} data plane and {len(mgmt_plane_packages)} management plane packages" + ) + + return (data_plane_packages, 
mgmt_plane_packages) + + +def package_needs_update( + package_row: Dict[str, str], prev_release_date: str, is_new=False +) -> bool: + """ + Check if the package is new or needs version update (i.e., FirstGADate or LatestGADate is after the last release). + + :param package_row: The parsed CSV row for the package. + :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. + :param is_new: Whether to check for new package (FirstGADate) or outdated package (LatestGADate). + :return: if the package is new or needs an update. + """ + compare_date = ( + package_row.get(FIRST_GA_DATE_COL) + if is_new + else package_row.get(LATEST_GA_DATE_COL) + ) + + logger.debug( + f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} against date: {compare_date}" + ) + + if not compare_date: + logger.debug( + f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." 
+ ) + + # TODO need to verify that this is the desired behavior / we're not skipping needed packages + + return False + + try: + # Convert string dates to datetime objects for proper comparison + compare_date = datetime.strptime(compare_date, "%m/%d/%Y") + prev_date = datetime.strptime(prev_release_date, "%m/%d/%Y") + logger.debug( + f"Comparing {package_row.get(PACKAGE_COL)} CompareDate {compare_date} with previous release date {prev_date}" + ) + return compare_date > prev_date + except ValueError as e: + logger.error( + f"Date parsing error for package {package_row.get(PACKAGE_COL)}: {e}" + ) + return False + + +def get_package_data_from_pypi( + package_name: str, +) -> Tuple[Optional[str], Optional[str]]: + """Fetch the latest version and download URI for a package from PyPI.""" + pypi_url = f"https://pypi.org/pypi/{package_name}/json" + try: + with urllib.request.urlopen(pypi_url, timeout=10) as response: + data = json.loads(response.read().decode("utf-8")) + + # Get the latest version + latest_version = data["info"]["version"] + if latest_version in data["releases"] and data["releases"][latest_version]: + # Get the source distribution (sdist) if available + files = data["releases"][latest_version] + source_dist = next( + (f for f in files if f["packagetype"] == "sdist"), None + ) + if source_dist: + download_url = source_dist["url"] + logger.info( + f"Found download URL for {package_name}=={latest_version}: {download_url}" + ) + return latest_version, download_url + + except Exception as e: + logger.error(f"Failed to fetch download URI from PyPI for {package_name}: {e}") + return None, None + + +def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: + """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml.""" + package_index = {} + + for artifact_idx, artifact in enumerate(conda_artifacts): + if "checkout" in artifact: + for checkout_idx, checkout_item in 
enumerate(artifact["checkout"]): + package_name = checkout_item.get("package") + if package_name: + package_index[package_name] = (artifact_idx, checkout_idx) + return package_index diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 3a1a963099b1..b3c810ae8a19 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -13,6 +13,13 @@ from ci_tools.logging import logger, configure_logging from ci_tools.parsing import ParsedSetup, extract_package_metadata from typing import Dict, List, Optional, Tuple +from conda_helper_functions import ( + parse_csv, + separate_packages_by_type, + package_needs_update, + get_package_data_from_pypi, + build_package_index, +) from conda_release_groups import ( get_package_group_data, @@ -89,152 +96,6 @@ def update_conda_version() -> Tuple[datetime, str]: return old_date, new_version -# ===================================== -# Utility functions -# ===================================== - - -def parse_csv() -> List[Dict[str, str]]: - """Download and parse the Azure SDK Python packages CSV file.""" - try: - logger.info(f"Downloading CSV from {AZURE_SDK_CSV_URL}") - - with urllib.request.urlopen(AZURE_SDK_CSV_URL) as response: - csv_content = response.read().decode("utf-8") - - # Parse the CSV content - csv_reader = csv.DictReader(csv_content.splitlines()) - packages = list(csv_reader) - - logger.info(f"Successfully parsed {len(packages)} packages from CSV") - - return packages - - except Exception as e: - logger.error(f"Failed to download or parse CSV: {e}") - return [] - - -def is_mgmt_package(pkg: Dict[str, str]) -> bool: - pkg_name = pkg.get(PACKAGE_COL, "") - _type = pkg.get(TYPE_COL, "") - if _type == "mgmt": - return True - elif _type == "client": - return False - else: - return pkg_name != "azure-mgmt-core" and ( - "mgmt" in pkg_name or "cognitiveservices" in pkg_name - ) - - -def separate_packages_by_type( - packages: List[Dict[str, str]], -) -> Tuple[List[Dict[str, str]], List[Dict[str, 
str]]]: - """Separate packages into data plane and management plane libraries.""" - data_plane_packages = [] - mgmt_plane_packages = [] - - for pkg in packages: - if is_mgmt_package(pkg): - mgmt_plane_packages.append(pkg) - else: - data_plane_packages.append(pkg) - - logger.debug( - f"Separated {len(data_plane_packages)} data plane and {len(mgmt_plane_packages)} management plane packages" - ) - - return (data_plane_packages, mgmt_plane_packages) - - -def package_needs_update( - package_row: Dict[str, str], prev_release_date: str, is_new=False -) -> bool: - """ - Check if the package is new or needs version update (i.e., FirstGADate or LatestGADate is after the last release). - - :param package_row: The parsed CSV row for the package. - :param prev_release_date: The date of the previous release in "mm/dd/yyyy" format. - :param is_new: Whether to check for new package (FirstGADate) or outdated package (LatestGADate). - :return: if the package is new or needs an update. - """ - compare_date = ( - package_row.get(FIRST_GA_DATE_COL) - if is_new - else package_row.get(LATEST_GA_DATE_COL) - ) - - logger.debug( - f"Checking {'new package' if is_new else 'outdated package'} for package {package_row.get(PACKAGE_COL)} with against date: {compare_date}" - ) - - if not compare_date: - logger.debug( - f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." 
- ) - - # TODO need to verify that this is the desired behavior / we're not skipping needed packages - - return False - - try: - # Convert string dates to datetime objects for proper comparison - compare_date = datetime.strptime(compare_date, "%m/%d/%Y") - prev_date = datetime.strptime(prev_release_date, "%m/%d/%Y") - logger.debug( - f"Comparing {package_row.get(PACKAGE_COL)} CompareDate {compare_date} with previous release date {prev_date}" - ) - return compare_date > prev_date - except ValueError as e: - logger.error( - f"Date parsing error for package {package_row.get(PACKAGE_COL)}: {e}" - ) - return False - - -def get_package_data_from_pypi( - package_name: str, -) -> Tuple[Optional[str], Optional[str]]: - """Fetch the latest version and download URI for a package from PyPI.""" - pypi_url = f"https://pypi.org/pypi/{package_name}/json" - try: - with urllib.request.urlopen(pypi_url, timeout=10) as response: - data = json.loads(response.read().decode("utf-8")) - - # Get the latest version - latest_version = data["info"]["version"] - if latest_version in data["releases"] and data["releases"][latest_version]: - # Get the source distribution (sdist) if available - files = data["releases"][latest_version] - source_dist = next( - (f for f in files if f["packagetype"] == "sdist"), None - ) - if source_dist: - download_url = source_dist["url"] - logger.info( - f"Found download URL for {package_name}=={latest_version}: {download_url}" - ) - return latest_version, download_url - - except Exception as e: - logger.error(f"Failed to fetch download URI from PyPI for {package_name}: {e}") - return None, None - - -def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: - """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml.""" - package_index = {} - - for artifact_idx, artifact in enumerate(conda_artifacts): - if "checkout" in artifact: - for checkout_idx, checkout_item in 
enumerate(artifact["checkout"]): - package_name = checkout_item.get("package") - if package_name: - package_index[package_name] = (artifact_idx, checkout_idx) - return package_index - - # ===================================== # Helpers for updating conda-sdk-client.yml # ===================================== @@ -850,8 +711,12 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] # Helpers for updating release logs # ===================================== + def update_release_logs( - package_dict: Dict, new_data_plane_names: List[str], new_mgmt_plane_names: List[str], release_date: str + package_dict: Dict, + new_data_plane_names: List[str], + new_mgmt_plane_names: List[str], + release_date: str, ) -> List[str]: """ Add and update release logs for conda packages. Release log includes versions of all packages for the release @@ -877,11 +742,11 @@ def update_release_logs( group_name = get_release_group(package_name, get_package_to_group_mapping()) group_data = get_package_group_data(group_name) if group_data: + release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{group_name}.md") + else: release_log_path = os.path.join( - CONDA_RELEASE_LOGS_DIR, f"{group_name}.md" + CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" ) - else: - release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{package_name}.md") if not os.path.exists(release_log_path): # Add new release log From 6e5c12998548e81889e996771a62e222707d529a Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 10:27:05 -0800 Subject: [PATCH 039/112] minor progress --- conda/conda_helper_functions.py | 12 ++++++- conda/conda_release_groups.py | 2 +- conda/update_conda_files.py | 63 +++++++++++++++++++++------------ 3 files changed, 52 insertions(+), 25 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index e38b8248a4c1..490a7acc6a15 100644 --- a/conda/conda_helper_functions.py +++ 
b/conda/conda_helper_functions.py @@ -1,7 +1,8 @@ """ Helper functions for updating conda files. """ - +import os +import glob from typing import Dict, List, Optional, Tuple import csv import json @@ -9,6 +10,9 @@ import urllib.request from datetime import datetime +# TODO move the constants into a third file +ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +SDK_DIR = os.path.join(ROOT_DIR, "sdk") AZURE_SDK_CSV_URL = "https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv" PACKAGE_COL = "Package" LATEST_GA_DATE_COL = "LatestGADate" @@ -159,3 +163,9 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int if package_name: package_index[package_name] = (artifact_idx, checkout_idx) return package_index + +def get_package_path(package_name: str) -> str: + """Get the filesystem path of an SDK package given its name.""" + pattern = os.path.join(SDK_DIR, "**", package_name) + matches = glob.glob(pattern, recursive=True) + return matches[0] diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index f0db324fc0a4..d92ae28914ed 100644 --- a/conda/conda_release_groups.py +++ b/conda/conda_release_groups.py @@ -106,7 +106,7 @@ def get_release_group(package_name: str, package_to_group: dict) -> str: Get the release group name for a given package. 
:param package_name: The package name (e.g., "azure-core", "azure-communication-chat") - :return: The release group name (e.g., "azure-core", "azure-communication") + :return: The release group name (e.g., "azure-core", "azure-communication"), or package name itself if not grouped """ return package_to_group.get(package_name, package_name) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index b3c810ae8a19..553f48a27211 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -19,6 +19,7 @@ package_needs_update, get_package_data_from_pypi, build_package_index, + get_package_path ) from conda_release_groups import ( @@ -366,14 +367,6 @@ def update_conda_sdk_client_yml( # Helpers for creating conda-recipes//meta.yaml files # ===================================== - -def get_package_path(package_name: str) -> str: - """Get the filesystem path of an SDK package given its name.""" - pattern = os.path.join(SDK_DIR, "**", package_name) - matches = glob.glob(pattern, recursive=True) - return matches[0] - - def determine_service_info( pkg: Dict[str, str], package_to_group: dict ) -> Tuple[str, str]: @@ -585,6 +578,7 @@ def add_new_data_plane_packages( logger.info(f"Adding {len(new_data_plane_names)} new data plane packages") result = [] + # grouped packages are processed once when encountering the first package in that group group_names_processed = set() for package_name in new_data_plane_names: logger.info(f"Adding new data plane meta.yaml for: {package_name}") @@ -722,13 +716,35 @@ def update_release_logs( Add and update release logs for conda packages. 
Release log includes versions of all packages for the release """ result = [] - - # TODO update mgmt release log separately - mgmt_release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") + package_to_group = get_package_to_group_mapping() # TODO update all existing data plane release logs + existing_release_logs = glob.glob(os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-*.md")) + for release_log_path in existing_release_logs: + curr_service_name = os.path.basename(release_log_path).replace(".md", "") + try: + with open(release_log_path, "r") as f: + existing_content = f.read() + + lines = existing_content.split("\n") + + new_release = f"\n## {release_date}\n\n" + new_release += "### Packages included\n\n" + + group_name = get_release_group(curr_service_name, package_to_group) + group_data = get_package_group_data(group_name) - # TODO update release logs for new packages + # with open(release_log_path, "w") as f: + # f.write(updated_content) + + logger.info(f"Updated release log for {os.path.basename(release_log_path)}") + except Exception as e: + logger.error( + f"Failed to update release log {os.path.basename(release_log_path)}: {e}" + ) + result.append(curr_service_name) + + # TODO release logs for new packages for package_name in new_data_plane_names: pkg = package_dict.get(package_name, {}) version = pkg.get(VERSION_GA_COL) @@ -750,24 +766,24 @@ def update_release_logs( if not os.path.exists(release_log_path): # Add new release log - logger.info(f"Creating new release log for: {package_name}") + logger.info(f"Creating new release log for: {group_name}") try: - title_parts = package_name.replace("azure-", "").split("-") + title_parts = group_name.replace("azure-", "").split("-") title = " ".join(word.title() for word in title_parts) content = f"# Azure {title} client library for Python (conda)\n\n" content += f"## {release_date}\n\n" content += "### Packages included\n\n" - content += f"- {package_name}-{version}\n" + content += f"- 
{group_name}-{version}\n" with open(release_log_path, "w") as f: f.write(content) - logger.info(f"Created new release log for {package_name}") + logger.info(f"Created new release log for {group_name}") except Exception as e: - logger.error(f"Failed to create release log for {package_name}: {e}") - result.append(package_name) + logger.error(f"Failed to create release log for {group_name}: {e}") + result.append(group_name) else: # Update existing release log @@ -792,6 +808,9 @@ def update_release_logs( logger.error(f"Failed to update release log for {package_name}: {e}") result.append(package_name) + # TODO update mgmt release log separately + mgmt_release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") + # TODO AKA link pointing to new release logs needs to happen return result @@ -815,9 +834,7 @@ def update_release_logs( # convert to mm/dd/yyyy format for comparison with CSV dates old_version = old_date.strftime("%m/%d/%Y") - # Parse CSV data packages = parse_csv() - if not packages: logger.error("No packages found in CSV data.") exit(1) @@ -840,6 +857,9 @@ def update_release_logs( new_packages ) + # map package name to csv row for easy lookup + package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} + # Extract package names from the filtered lists outdated_package_names = [ pkg.get(PACKAGE_COL, "") for pkg in outdated_packages if pkg.get(PACKAGE_COL) @@ -855,9 +875,6 @@ def update_release_logs( if pkg.get(PACKAGE_COL) ] - # map package name to csv row for easy lookup - package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} - # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( From 966fb40b1aff091f9c1e2fab1d0183631ccedb26 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 12:41:10 -0800 Subject: [PATCH 040/112] data plane release logs --- conda/update_conda_files.py | 96 ++++++++++++++++++++----------------- 1 file changed, 53 insertions(+), 43 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 553f48a27211..d077f52bded9 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -19,7 +19,7 @@ package_needs_update, get_package_data_from_pypi, build_package_index, - get_package_path + get_package_path, ) from conda_release_groups import ( @@ -367,6 +367,7 @@ def update_conda_sdk_client_yml( # Helpers for creating conda-recipes//meta.yaml files # ===================================== + def determine_service_info( pkg: Dict[str, str], package_to_group: dict ) -> Tuple[str, str]: @@ -706,22 +707,50 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] # ===================================== -def update_release_logs( +def update_data_plane_release_logs( package_dict: Dict, new_data_plane_names: List[str], - new_mgmt_plane_names: List[str], release_date: str, ) -> List[str]: """ - Add and update release logs for conda packages. Release log includes versions of all packages for the release + Add and update release logs for data plane conda packages. 
Release log includes versions of all packages for the release """ result = [] package_to_group = get_package_to_group_mapping() - # TODO update all existing data plane release logs - existing_release_logs = glob.glob(os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-*.md")) + # Update all existing data plane release logs by file + # Note, for new packages added to an existing group, this should handle that as well + # if conda_release_groups.py was updated to include the new package in the group + + existing_release_logs = glob.glob( + os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-*.md") + ) for release_log_path in existing_release_logs: curr_service_name = os.path.basename(release_log_path).replace(".md", "") + # skip azure-mgmt here + if curr_service_name == "azure-mgmt": + continue + if curr_service_name not in package_dict: + logger.warning( + f"Skipping existing data plane release log update for {curr_service_name} because it was not found in CSV data" + ) + result.append(curr_service_name) + continue + + group_name = get_release_group(curr_service_name, package_to_group) + group_data = get_package_group_data(group_name) + + pkg_updates = set() + if group_data: + pkg_names_in_log = group_data["packages"] + for pkg_name in pkg_names_in_log: + pkg = package_dict.get(pkg_name, {}) + version = pkg.get(VERSION_GA_COL) + pkg_updates.update(f"- {pkg_name}-{version}\n") + else: + pkg = package_dict.get(curr_service_name, {}) + version = pkg.get(VERSION_GA_COL) + pkg_updates.update(f"- {curr_service_name}-{version}\n") try: with open(release_log_path, "r") as f: existing_content = f.read() @@ -731,11 +760,12 @@ def update_release_logs( new_release = f"\n## {release_date}\n\n" new_release += "### Packages included\n\n" - group_name = get_release_group(curr_service_name, package_to_group) - group_data = get_package_group_data(group_name) + new_release += "".join(pkg_updates) + lines.insert(1, new_release) + updated_content = "\n".join(lines) - # with open(release_log_path, "w") 
as f: - # f.write(updated_content) + with open(release_log_path, "w") as f: + f.write(updated_content) logger.info(f"Updated release log for {os.path.basename(release_log_path)}") except Exception as e: @@ -744,7 +774,7 @@ def update_release_logs( ) result.append(curr_service_name) - # TODO release logs for new packages + # Handle brand new packages for package_name in new_data_plane_names: pkg = package_dict.get(package_name, {}) version = pkg.get(VERSION_GA_COL) @@ -765,7 +795,7 @@ def update_release_logs( ) if not os.path.exists(release_log_path): - # Add new release log + # Add brand new release log file logger.info(f"Creating new release log for: {group_name}") try: @@ -786,32 +816,9 @@ def update_release_logs( result.append(group_name) else: - # Update existing release log - try: - with open(release_log_path, "r") as f: - existing_content = f.read() - - lines = existing_content.split("\n") - - new_release = f"\n## {release_date}\n\n" - new_release += "### Packages included\n\n" - new_release += f"- {package_name}-{version}\n" - - lines.insert(1, new_release) - updated_content = "\n".join(lines) - - with open(release_log_path, "w") as f: - f.write(updated_content) - - logger.info(f"Updated release log for {package_name}") - except Exception as e: - logger.error(f"Failed to update release log for {package_name}: {e}") - result.append(package_name) - - # TODO update mgmt release log separately - mgmt_release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") - - # TODO AKA link pointing to new release logs needs to happen + logger.info( + f"Release log for {group_name} already exists, check that new package {package_name} is included" + ) return result @@ -890,9 +897,12 @@ def update_release_logs( new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) # add/update release logs - release_log_results = update_release_logs( - package_dict, new_data_plane_names, new_mgmt_plane_names, new_version + data_plane_release_log_results = 
update_data_plane_release_logs( + package_dict, new_data_plane_names, new_version ) + # TODO handle mgmt separately + + # TODO AKA link logic print("=== REPORT ===") @@ -917,9 +927,9 @@ def update_release_logs( for pkg_name in new_mgmt_plane_results: print(f"- {pkg_name}") - if release_log_results: + if data_plane_release_log_results: print( - "\nThe following packages may require manual adjustments in release logs:" + "\nThe following data plane packages may require manual adjustments in release logs:" ) - for pkg_name in release_log_results: + for pkg_name in data_plane_release_log_results: print(f"- {pkg_name}") From 1220941387da42abdfeeaee0dc67037241044d54 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:06:19 -0800 Subject: [PATCH 041/112] remove azure-common from group, need to handle --- conda/conda_release_groups.py | 2 +- conda/update_conda_files.py | 55 ++++++++++++++++++++++++++--------- 2 files changed, 42 insertions(+), 15 deletions(-) diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index d92ae28914ed..2777a57ee2ad 100644 --- a/conda/conda_release_groups.py +++ b/conda/conda_release_groups.py @@ -18,7 +18,7 @@ RELEASE_GROUPS = { # Core "azure-core": { - "packages": ["azure-core", "azure-mgmt-core", "azure-common"], + "packages": ["azure-core", "azure-mgmt-core"], "common_root": "azure", "service": "core", }, diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index d077f52bded9..9c4195a2dbfc 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -719,11 +719,11 @@ def update_data_plane_release_logs( package_to_group = get_package_to_group_mapping() # Update all existing data plane release logs by file - # Note, for new packages added to an existing group, this should handle that as well - # if conda_release_groups.py was updated to include the new package in the group + # NOTE: for new packages added to an existing release 
group, this should handle that as well + # as long as conda_release_groups.py was updated to include the new package in the group existing_release_logs = glob.glob( - os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-*.md") + os.path.join(CONDA_RELEASE_LOGS_DIR, "*.md") ) for release_log_path in existing_release_logs: curr_service_name = os.path.basename(release_log_path).replace(".md", "") @@ -732,25 +732,26 @@ def update_data_plane_release_logs( continue if curr_service_name not in package_dict: logger.warning( - f"Skipping existing data plane release log update for {curr_service_name} because it was not found in CSV data" + f"Existing release log service {curr_service_name} was not found in CSV data, using same versions as before. Check that it's not deprecated." ) result.append(curr_service_name) + #TODO continue group_name = get_release_group(curr_service_name, package_to_group) group_data = get_package_group_data(group_name) - pkg_updates = set() + pkg_updates = [] if group_data: pkg_names_in_log = group_data["packages"] for pkg_name in pkg_names_in_log: pkg = package_dict.get(pkg_name, {}) version = pkg.get(VERSION_GA_COL) - pkg_updates.update(f"- {pkg_name}-{version}\n") + pkg_updates.append(f"- {pkg_name}-{version}\n") else: pkg = package_dict.get(curr_service_name, {}) version = pkg.get(VERSION_GA_COL) - pkg_updates.update(f"- {curr_service_name}-{version}\n") + pkg_updates.append(f"- {curr_service_name}-{version}\n") try: with open(release_log_path, "r") as f: existing_content = f.read() @@ -798,16 +799,27 @@ def update_data_plane_release_logs( # Add brand new release log file logger.info(f"Creating new release log for: {group_name}") - try: - title_parts = group_name.replace("azure-", "").split("-") - title = " ".join(word.title() for word in title_parts) + title_parts = group_name.replace("azure-", "").split("-") + title = " ".join(word.title() for word in title_parts) - content = f"# Azure {title} client library for Python (conda)\n\n" - content += f"## 
{release_date}\n\n" - content += "### Packages included\n\n" + content = f"# Azure {title} client library for Python (conda)\n\n" + content += f"## {release_date}\n\n" + content += "### Packages included\n\n" - content += f"- {group_name}-{version}\n" + pkg_updates = [] + if group_data: + pkg_names_in_log = group_data["packages"] + for pkg_name in pkg_names_in_log: + pkg = package_dict.get(pkg_name, {}) + version = pkg.get(VERSION_GA_COL) + pkg_updates.append(f"- {pkg_name}-{version}\n") + else: + pkg = package_dict.get(package_name, {}) + version = pkg.get(VERSION_GA_COL) + pkg_updates.append(f"- {package_name}-{version}\n") + content += "".join(pkg_updates) + try: with open(release_log_path, "w") as f: f.write(content) logger.info(f"Created new release log for {group_name}") @@ -822,6 +834,21 @@ def update_data_plane_release_logs( return result +def update_mgmt_plane_release_log( + package_dict: Dict, + new_mgmt_plane_names: List[str], + release_date: str, +) -> List[str]: + result = [] + + mgmt_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") + if not os.path.exists(mgmt_log_path): + logger.error("Management plane release log azure-mgmt.md does not exist.") + return new_mgmt_plane_names # all new packages need attention + + + + return result if __name__ == "__main__": parser = argparse.ArgumentParser( From 5b3ebdf37314641531297c0bce8bd5aed1856e51 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:15:56 -0800 Subject: [PATCH 042/112] data plane existing release log update almost fully works --- conda/update_conda_files.py | 36 +++++++++++++++++++++++------------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 9c4195a2dbfc..f5c1b99b861c 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -722,9 +722,7 @@ def update_data_plane_release_logs( # NOTE: for new packages added to an existing 
release group, this should handle that as well # as long as conda_release_groups.py was updated to include the new package in the group - existing_release_logs = glob.glob( - os.path.join(CONDA_RELEASE_LOGS_DIR, "*.md") - ) + existing_release_logs = glob.glob(os.path.join(CONDA_RELEASE_LOGS_DIR, "*.md")) for release_log_path in existing_release_logs: curr_service_name = os.path.basename(release_log_path).replace(".md", "") # skip azure-mgmt here @@ -735,7 +733,7 @@ def update_data_plane_release_logs( f"Existing release log service {curr_service_name} was not found in CSV data, using same versions as before. Check that it's not deprecated." ) result.append(curr_service_name) - #TODO + # TODO continue group_name = get_release_group(curr_service_name, package_to_group) @@ -747,11 +745,23 @@ def update_data_plane_release_logs( for pkg_name in pkg_names_in_log: pkg = package_dict.get(pkg_name, {}) version = pkg.get(VERSION_GA_COL) - pkg_updates.append(f"- {pkg_name}-{version}\n") + if version: + pkg_updates.append(f"- {pkg_name}-{version}") + else: + logger.warning( + f"Package {pkg_name} in group {group_name} is missing version info, it may be deprecated. Skipping in release log update" + ) + result.append(pkg_name) else: pkg = package_dict.get(curr_service_name, {}) version = pkg.get(VERSION_GA_COL) - pkg_updates.append(f"- {curr_service_name}-{version}\n") + if version: + pkg_updates.append(f"- {curr_service_name}-{version}") + else: + logger.warning( + f"Package {curr_service_name} is missing version info, it may be deprecated. 
Skipping in release log update" + ) + result.append(curr_service_name) try: with open(release_log_path, "r") as f: existing_content = f.read() @@ -761,7 +771,7 @@ def update_data_plane_release_logs( new_release = f"\n## {release_date}\n\n" new_release += "### Packages included\n\n" - new_release += "".join(pkg_updates) + new_release += "\n".join(pkg_updates) lines.insert(1, new_release) updated_content = "\n".join(lines) @@ -812,12 +822,12 @@ def update_data_plane_release_logs( for pkg_name in pkg_names_in_log: pkg = package_dict.get(pkg_name, {}) version = pkg.get(VERSION_GA_COL) - pkg_updates.append(f"- {pkg_name}-{version}\n") + pkg_updates.append(f"- {pkg_name}-{version}") else: pkg = package_dict.get(package_name, {}) version = pkg.get(VERSION_GA_COL) - pkg_updates.append(f"- {package_name}-{version}\n") - content += "".join(pkg_updates) + pkg_updates.append(f"- {package_name}-{version}") + content += "\n".join(pkg_updates) try: with open(release_log_path, "w") as f: @@ -834,7 +844,8 @@ def update_data_plane_release_logs( return result -def update_mgmt_plane_release_log( + +def update_mgmt_plane_release_log( package_dict: Dict, new_mgmt_plane_names: List[str], release_date: str, @@ -846,10 +857,9 @@ def update_mgmt_plane_release_log( logger.error("Management plane release log azure-mgmt.md does not exist.") return new_mgmt_plane_names # all new packages need attention - - return result + if __name__ == "__main__": parser = argparse.ArgumentParser( description="Update conda package files and versions for release." 
From 4961d44cf6d1a1f3cefef8fcd85c48386fcff550 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:25:41 -0800 Subject: [PATCH 043/112] exception for healthinsights --- conda/conda_release_groups.py | 8 +++++++- conda/update_conda_files.py | 5 ++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index 2777a57ee2ad..8a77e2cc6421 100644 --- a/conda/conda_release_groups.py +++ b/conda/conda_release_groups.py @@ -78,7 +78,8 @@ "common_root": "azure/keyvault", "service": "keyvault", }, - # Packages with common root exceptions + # Packages with other pattern exceptions, e.g. different common root + # or service vs package name mismatch "msrest": {"packages": ["msrest"], "common_root": None}, "msal": {"packages": ["msal"], "common_root": None}, "msal-extensions": { @@ -89,6 +90,11 @@ "packages": ["azure-ai-vision-imageanalysis"], "common_root": "azure/vision", }, + "azure-healthinsights": { + "packages": ["azure-healthinsights-patient-insights"], + "common_root": "azure", + "service": "healthinsights", + }, } diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index f5c1b99b861c..7361adabec8d 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -728,12 +728,11 @@ def update_data_plane_release_logs( # skip azure-mgmt here if curr_service_name == "azure-mgmt": continue - if curr_service_name not in package_dict: + if curr_service_name not in package_dict and curr_service_name not in package_to_group.values(): logger.warning( - f"Existing release log service {curr_service_name} was not found in CSV data, using same versions as before. Check that it's not deprecated." + f"Existing release log service {curr_service_name} was not found in CSV data, skipping update. It may be deprecated." 
) result.append(curr_service_name) - # TODO continue group_name = get_release_group(curr_service_name, package_to_group) From 0ab4bd3a72b078f94bd80c2867a6227c8ddb535e Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:42:01 -0800 Subject: [PATCH 044/112] yayay mgmt release log --- conda/update_conda_files.py | 58 ++++++++++++++++++++++++++++++++++--- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 7361adabec8d..4f8f78b6d4ae 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -728,7 +728,10 @@ def update_data_plane_release_logs( # skip azure-mgmt here if curr_service_name == "azure-mgmt": continue - if curr_service_name not in package_dict and curr_service_name not in package_to_group.values(): + if ( + curr_service_name not in package_dict + and curr_service_name not in package_to_group.values() + ): logger.warning( f"Existing release log service {curr_service_name} was not found in CSV data, skipping update. It may be deprecated." ) @@ -846,15 +849,51 @@ def update_data_plane_release_logs( def update_mgmt_plane_release_log( package_dict: Dict, - new_mgmt_plane_names: List[str], + all_mgmt_plane_names: List[str], release_date: str, ) -> List[str]: + """ + Update azure-mgmt release log. 
+ """ result = [] mgmt_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, "azure-mgmt.md") if not os.path.exists(mgmt_log_path): logger.error("Management plane release log azure-mgmt.md does not exist.") - return new_mgmt_plane_names # all new packages need attention + return all_mgmt_plane_names # all new packages need attention + + pkg_updates = [] + for package_name in all_mgmt_plane_names: + pkg = package_dict.get(package_name, {}) + version = pkg.get(VERSION_GA_COL) + + if not version: + logger.warning( + f"Skipping release log update of {package_name} with missing version" + ) + result.append(package_name) + continue + + pkg_updates.append(f"- {package_name}-{version}") + + try: + with open(mgmt_log_path, "r") as f: + existing_content = f.read() + + lines = existing_content.split("\n") + + new_release = f"\n## {release_date}\n\n" + new_release += "### Packages included\n\n" + + new_release += "\n".join(pkg_updates) + lines.insert(1, new_release) + updated_content = "\n".join(lines) + + with open(mgmt_log_path, "w") as f: + f.write(updated_content) + except Exception as e: + logger.error(f"Failed to update azure-mgmt release log: {e}") + return all_mgmt_plane_names return result @@ -882,6 +921,8 @@ def update_mgmt_plane_release_log( logger.error("No packages found in CSV data.") exit(1) + # TODO clean this part up + # Only ship GA packages that are not deprecated packages = [ pkg @@ -900,6 +941,12 @@ def update_mgmt_plane_release_log( new_packages ) + # need for mgmt release log update + _, all_mgmt_packages = separate_packages_by_type(packages) + all_mgmt_packages = [ + pkg.get(PACKAGE_COL, "") for pkg in all_mgmt_packages if pkg.get(PACKAGE_COL) + ] + # map package name to csv row for easy lookup package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} @@ -936,7 +983,10 @@ def update_mgmt_plane_release_log( data_plane_release_log_results = update_data_plane_release_logs( package_dict, new_data_plane_names, new_version ) - # TODO handle mgmt separately 
+ + mgmt_plane_release_log_results = update_mgmt_plane_release_log( + package_dict, all_mgmt_packages, new_version + ) # TODO AKA link logic From d5b543a1834ff1f07dea7ffa9f0f4b32caeb1fd3 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:46:53 -0800 Subject: [PATCH 045/112] uampq --- conda/conda_helper_functions.py | 7 ++++++- conda/update_conda_files.py | 6 +++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 490a7acc6a15..ce7ca6839018 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -1,7 +1,8 @@ """ Helper functions for updating conda files. """ -import os + +import os import glob from typing import Dict, List, Optional, Tuple import csv @@ -100,6 +101,9 @@ def package_needs_update( ) if not compare_date: + if package_row.get(PACKAGE_COL) == "uamqp": + return True # uamqp is an exception + logger.debug( f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." 
) @@ -164,6 +168,7 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int package_index[package_name] = (artifact_idx, checkout_idx) return package_index + def get_package_path(package_name: str) -> str: """Get the filesystem path of an SDK package given its name.""" pattern = os.path.join(SDK_DIR, "**", package_name) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 4f8f78b6d4ae..b0f50e65648e 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -924,10 +924,14 @@ def update_mgmt_plane_release_log( # TODO clean this part up # Only ship GA packages that are not deprecated + # uamqp is an exception as it has VersionGA but no LatestGADate or FirstGADate packages = [ pkg for pkg in packages - if (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL)) + if ( + (pkg.get(PACKAGE_COL) == "uamqp") + or (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL)) + ) ] logger.info(f"Filtered to {len(packages)} GA packages") From 3b428132800e1ad27204cbcc60b5b310dc4c03fe Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:48:33 -0800 Subject: [PATCH 046/112] minor --- conda/conda_helper_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index ce7ca6839018..c4aba33c44fb 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -101,7 +101,7 @@ def package_needs_update( ) if not compare_date: - if package_row.get(PACKAGE_COL) == "uamqp": + if not is_new and package_row.get(PACKAGE_COL) == "uamqp": return True # uamqp is an exception logger.debug( From 7849d86d02134d4a5e82c6ae9367fdafac3688bf Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 16:54:53 -0800 Subject: [PATCH 047/112] fix hyphenated mgmt imports --- conda/conda-recipes/azure-mgmt/meta.yaml | 16 
++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/conda/conda-recipes/azure-mgmt/meta.yaml b/conda/conda-recipes/azure-mgmt/meta.yaml index 9735b91ed0fe..a91087d648d9 100644 --- a/conda/conda-recipes/azure-mgmt/meta.yaml +++ b/conda/conda-recipes/azure-mgmt/meta.yaml @@ -75,7 +75,7 @@ test: - azure.mgmt.applicationinsights.v2022_06_15.aio.operations - azure.mgmt.applicationinsights.v2022_06_15.models - azure.mgmt.applicationinsights.v2022_06_15.operations - - azure-mgmt-arizeaiobservabilityeval + - azure.mgmt.arizeaiobservabilityeval - azure.mgmt.arizeaiobservabilityeval.aio - azure.mgmt.arizeaiobservabilityeval.aio.operations - azure.mgmt.arizeaiobservabilityeval.models @@ -152,7 +152,7 @@ test: - azure.mgmt.botservice.aio.operations - azure.mgmt.botservice.models - azure.mgmt.botservice.operations - - azure-mgmt-carbonoptimization + - azure.mgmt.carbonoptimization - azure.mgmt.carbonoptimization.aio - azure.mgmt.carbonoptimization.aio.operations - azure.mgmt.carbonoptimization.models @@ -421,7 +421,7 @@ test: - azure.mgmt.hanaonazure.aio.operations - azure.mgmt.hanaonazure.models - azure.mgmt.hanaonazure.operations - - azure-mgmt-hardwaresecuritymodules + - azure.mgmt.hardwaresecuritymodules - azure.mgmt.hardwaresecuritymodules.aio - azure.mgmt.hardwaresecuritymodules.aio.operations - azure.mgmt.hardwaresecuritymodules.models @@ -517,7 +517,7 @@ test: - azure.mgmt.labservices.aio.operations - azure.mgmt.labservices.models - azure.mgmt.labservices.operations - - azure-mgmt-lambdatesthyperexecute + - azure.mgmt.lambdatesthyperexecute - azure.mgmt.lambdatesthyperexecute.aio - azure.mgmt.lambdatesthyperexecute.aio.operations - azure.mgmt.lambdatesthyperexecute.models @@ -612,7 +612,7 @@ test: - azure.mgmt.mongocluster.aio.operations - azure.mgmt.mongocluster.models - azure.mgmt.mongocluster.operations - - azure-mgmt-mongodbatlas + - azure.mgmt.mongodbatlas - azure.mgmt.mongodbatlas.aio - azure.mgmt.mongodbatlas.aio.operations - 
azure.mgmt.mongodbatlas.models @@ -732,7 +732,7 @@ test: - azure.mgmt.privatedns.aio.operations - azure.mgmt.privatedns.models - azure.mgmt.privatedns.operations - - azure-mgmt-purestorageblock + - azure.mgmt.purestorageblock - azure.mgmt.purestorageblock.aio - azure.mgmt.purestorageblock.aio.operations - azure.mgmt.purestorageblock.models @@ -793,7 +793,7 @@ test: - azure.mgmt.recoveryservicesbackup.passivestamp.aio.operations - azure.mgmt.recoveryservicesbackup.passivestamp.models - azure.mgmt.recoveryservicesbackup.passivestamp.operations - - azure-mgmt-recoveryservicesdatareplication + - azure.mgmt.recoveryservicesdatareplication - azure.mgmt.recoveryservicesdatareplication.aio - azure.mgmt.recoveryservicesdatareplication.aio.operations - azure.mgmt.recoveryservicesdatareplication.models @@ -974,7 +974,7 @@ test: - azure.mgmt.storage.aio.operations - azure.mgmt.storage.models - azure.mgmt.storage.operations - - azure-mgmt-storageactions + - azure.mgmt.storageactions - azure.mgmt.storageactions.aio - azure.mgmt.storageactions.aio.operations - azure.mgmt.storageactions.models From fb73b2caf3bf83f00664dc5f1efc3af60098132c Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 17:16:52 -0800 Subject: [PATCH 048/112] check if release log already has section --- conda/update_conda_files.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index b0f50e65648e..f1ff015a546b 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -771,6 +771,14 @@ def update_data_plane_release_logs( lines = existing_content.split("\n") new_release = f"\n## {release_date}\n\n" + + # check if release is already logged + if new_release in existing_content: + logger.info( + f"Release log for {curr_service_name} already contains entry for {release_date}, skipping update" + ) + continue + new_release += "### Packages included\n\n" new_release += 
"\n".join(pkg_updates) @@ -883,6 +891,14 @@ def update_mgmt_plane_release_log( lines = existing_content.split("\n") new_release = f"\n## {release_date}\n\n" + + # check if release is already logged + if new_release in existing_content: + logger.info( + f"Release log for azure-mgmt already contains entry for {release_date}, skipping update" + ) + return result + new_release += "### Packages included\n\n" new_release += "\n".join(pkg_updates) From 39a6fc50332d9f818790500faa788c8f534ddcf4 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 7 Jan 2026 17:25:20 -0800 Subject: [PATCH 049/112] remove uamqp ref --- conda/update_conda_files.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index f1ff015a546b..87c6ee43b0a0 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -940,14 +940,10 @@ def update_mgmt_plane_release_log( # TODO clean this part up # Only ship GA packages that are not deprecated - # uamqp is an exception as it has VersionGA but no LatestGADate or FirstGADate packages = [ pkg for pkg in packages - if ( - (pkg.get(PACKAGE_COL) == "uamqp") - or (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL)) - ) + if pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL) ] logger.info(f"Filtered to {len(packages)} GA packages") From e7a094a658387320289a2712d2e0157ac93fb259 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 10:04:27 -0800 Subject: [PATCH 050/112] overwrite release log existing sections --- conda/update_conda_files.py | 34 +++++++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 87c6ee43b0a0..c4e4ac66b122 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -775,14 +775,26 @@ def update_data_plane_release_logs( # 
check if release is already logged if new_release in existing_content: logger.info( - f"Release log for {curr_service_name} already contains entry for {release_date}, skipping update" + f"Release log for {curr_service_name} already contains entry for {release_date}, overwriting" ) - continue + # remove existing release section to overwrite + release_idx = lines.index(new_release.strip()) + + ## find next release heading or end of file + next_release_idx = next( + ( + i + for i in range(release_idx + 1, len(lines)) + if lines[i].startswith("## ") + ), + len(lines), + ) + del lines[release_idx:next_release_idx] new_release += "### Packages included\n\n" - new_release += "\n".join(pkg_updates) lines.insert(1, new_release) + updated_content = "\n".join(lines) with open(release_log_path, "w") as f: @@ -895,9 +907,21 @@ def update_mgmt_plane_release_log( # check if release is already logged if new_release in existing_content: logger.info( - f"Release log for azure-mgmt already contains entry for {release_date}, skipping update" + f"Release log for azure-mgmt already contains entry for {release_date}, overwriting" + ) + # remove existing release section to overwrite + release_idx = lines.index(new_release.strip()) + + ## find next release heading or end of file + next_release_idx = next( + ( + i + for i in range(release_idx + 1, len(lines)) + if lines[i].startswith("## ") + ), + len(lines), ) - return result + del lines[release_idx:next_release_idx] new_release += "### Packages included\n\n" From 74bb699eb2397d7d9466635715429249f4f2ba2c Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 10:10:28 -0800 Subject: [PATCH 051/112] minor temp grouping fix --- conda/conda_release_groups.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index 8a77e2cc6421..a5d6d2464760 100644 --- a/conda/conda_release_groups.py +++ 
b/conda/conda_release_groups.py @@ -91,7 +91,7 @@ "common_root": "azure/vision", }, "azure-healthinsights": { - "packages": ["azure-healthinsights-patient-insights"], + "packages": ["azure-healthinsights-radiology-insights"], "common_root": "azure", "service": "healthinsights", }, From 599831337d6ca5aeeb3384d487472d2e9c56f707 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 10:12:36 -0800 Subject: [PATCH 052/112] fix --- conda/conda_release_groups.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py index a5d6d2464760..31a1dff5bc75 100644 --- a/conda/conda_release_groups.py +++ b/conda/conda_release_groups.py @@ -91,7 +91,7 @@ "common_root": "azure/vision", }, "azure-healthinsights": { - "packages": ["azure-healthinsights-radiology-insights"], + "packages": ["azure-healthinsights-radiologyinsights"], "common_root": "azure", "service": "healthinsights", }, From df29426fba2e8ee4d2a9507a5f8cf9fafbc606aa Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 11:00:56 -0800 Subject: [PATCH 053/112] additional deprecated check --- conda/update_conda_files.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index c4e4ac66b122..3b1da06a064b 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -51,6 +51,7 @@ SERVICE_NAME_COL = "ServiceName" REPO_PATH_COL = "RepoPath" TYPE_COL = "Type" +SUPPORT_COL = "Support" # packages that should be shipped but are known to be missing from the csv PACKAGES_WITH_DOWNLOAD_URI = [ @@ -967,7 +968,10 @@ def update_mgmt_plane_release_log( packages = [ pkg for pkg in packages - if pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL) + if ( + (pkg.get(VERSION_GA_COL) and pkg.get(LATEST_GA_DATE_COL)) + and not pkg.get(SUPPORT_COL) == "deprecated" + ) ] 
logger.info(f"Filtered to {len(packages)} GA packages") From 03e9bf775e73db6f4dd7cb18c6511223f36cae2d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 14:57:36 -0800 Subject: [PATCH 054/112] parse stable vs beta release --- .../ci_tools/parsing/parse_functions.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 0c5bb8b5ebab..a37a007438bf 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -357,6 +357,15 @@ def get_config_setting(self, setting: str, default: Any = True) -> Any: def is_reporting_suppressed(self, setting: str) -> bool: return compare_string_to_glob_array(setting, self.get_config_setting("suppressed_skip_warnings", [])) + def is_stable_release(self) -> bool: + """ + Check if this package is a stable release version. + + :rtype: bool + :return: True if this is a stable release, False if beta + """ + return classify_release_type(self.version) == "stable" + def __str__(self): lines = [f"ParsedSetup from {self.folder}"] for attr in [ @@ -841,3 +850,23 @@ def compare_string_to_glob_array(string: str, glob_array: List[str]) -> bool: This function is used to easily compare a string to a set of glob strings, if it matches any of them, returns True. """ return any([fnmatch.fnmatch(string, glob) for glob in glob_array]) + + +def classify_release_type(version: str) -> str: + """ + Classify a package version as 'beta' or 'stable' based on version string patterns. 
+ + :param str version: The version string to classify (e.g., "1.0.0", "2.1.0b1", "1.5.0a2") + :rtype: str + :return: Either "beta" or "stable" + + Examples: + "1.0.0" -> "stable" + "2.1.0b1" -> "beta" + "1.5.0a2" -> "beta" + "3.0.0rc1" -> "beta" + "1.0.0.dev20241201" -> "beta" + """ + if "b" in version.lower(): + return "beta" + return "stable" From d1d589938f1e2edc8eb138877dc32727ed5ff31b Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 15:15:00 -0800 Subject: [PATCH 055/112] test pyproject bundle --- sdk/core/azure-core/pyproject.toml | 4 ++++ sdk/core/azure-mgmt-core/pyproject.toml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/sdk/core/azure-core/pyproject.toml b/sdk/core/azure-core/pyproject.toml index c847603044f5..22f7fa7364b7 100644 --- a/sdk/core/azure-core/pyproject.toml +++ b/sdk/core/azure-core/pyproject.toml @@ -20,3 +20,7 @@ name = "no_aiohttp" install = [] uninstall = ["aiohttp"] additional_pytest_args = ["-k", "_async.py"] + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-core" diff --git a/sdk/core/azure-mgmt-core/pyproject.toml b/sdk/core/azure-mgmt-core/pyproject.toml index 2f6cd1709824..5837349771e9 100644 --- a/sdk/core/azure-mgmt-core/pyproject.toml +++ b/sdk/core/azure-mgmt-core/pyproject.toml @@ -4,3 +4,7 @@ verifytypes = true pyright = false breaking = false black = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-core" From 7a21437f4a49a9be1a37c5b6d82a0fe1d5ac0b3f Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 8 Jan 2026 16:18:17 -0800 Subject: [PATCH 056/112] begin pyproject.toml logic attempt --- conda/conda_helper_functions.py | 44 ++++++++++++++++--- .../ci_tools/parsing/parse_functions.py | 22 ++++++++++ 2 files changed, 59 insertions(+), 7 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index c4aba33c44fb..0fcef1632c3b 100644 --- 
a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -10,6 +10,7 @@ from ci_tools.logging import logger import urllib.request from datetime import datetime +from ci_tools.parsing import ParsedSetup # TODO move the constants into a third file ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) @@ -24,6 +25,42 @@ REPO_PATH_COL = "RepoPath" TYPE_COL = "Type" +# ===================================== +# Helpers for handling bundled releases +# ===================================== + +def get_package_path(package_name: str) -> str: + """Get the filesystem path of an SDK package given its name.""" + pattern = os.path.join(SDK_DIR, "**", package_name) + matches = glob.glob(pattern, recursive=True) + return matches[0] + +def get_bundle_name(package_name: str) -> Optional[str]: + """ + Check bundled release config from package's pyproject.toml file. + + If bundled, return the bundle name; otherwise, return None. + """ + package_path = get_package_path(package_name) + parsed = ParsedSetup.from_path(package_path) + if not parsed: + # TODO raise something + logger.error(f"Failed to parse setup for package {package_name}") + return None + + # don't expect beta releases to have conda config, TODO raise something + if not parsed.is_stable_release(): + return None + + conda_config = parsed.get_conda_config() + if conda_config and "bundle_name" in conda_config: + return conda_config["bundle_name"] + + return None + +# ===================================== +# Utility functions for parsing data +# ===================================== def parse_csv() -> List[Dict[str, str]]: """Download and parse the Azure SDK Python packages CSV file.""" @@ -167,10 +204,3 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int if package_name: package_index[package_name] = (artifact_idx, checkout_idx) return package_index - - -def get_package_path(package_name: str) -> str: - """Get the filesystem path of an SDK package given its 
name."""
-    pattern = os.path.join(SDK_DIR, "**", package_name)
-    matches = glob.glob(pattern, recursive=True)
-    return matches[0]
diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py
index a37a007438bf..5f4f809988eb 100644
--- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py
+++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py
@@ -351,6 +351,9 @@ def from_path(cls, parse_directory_or_file: str):
     def get_build_config(self) -> Optional[Dict[str, Any]]:
         return get_build_config(self.folder)
 
+    def get_conda_config(self) -> Optional[Dict[str, Any]]:
+        return get_conda_config(self.folder)
+
     def get_config_setting(self, setting: str, default: Any = True) -> Any:
         return get_config_setting(self.folder, setting, default)
 
@@ -462,6 +465,25 @@ def get_build_config(package_path: str) -> Optional[Dict[str, Any]]:
     except:
         return {}
 
+def get_conda_config(package_path: str) -> Optional[Dict[str, Any]]:
+    """
+    Attempts to retrieve all values within [tool.azure-sdk-conda] section of a pyproject.toml. 
+ """ + if os.path.isfile(package_path): + package_path = os.path.dirname(package_path) + + toml_file = os.path.join(package_path, "pyproject.toml") + + if os.path.exists(toml_file): + try: + with open(toml_file, "rb") as f: + toml_dict = toml.load(f) + if "tool" in toml_dict: + tool_configs = toml_dict["tool"] + if "azure-sdk-conda" in tool_configs: + return tool_configs["azure-sdk-conda"] + except: + return {} def get_ci_config(package_path: str) -> Optional[Dict[str, Any]]: """ From c2af7cd2a92571fac81b4f9cf32fcc090ccee624 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 09:55:31 -0800 Subject: [PATCH 057/112] refactor conda sdk client update --- conda/conda_helper_functions.py | 4 +- conda/update_conda_files.py | 122 ++++++++++++++------------------ 2 files changed, 58 insertions(+), 68 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 0fcef1632c3b..7e61a5737510 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -48,10 +48,12 @@ def get_bundle_name(package_name: str) -> Optional[str]: logger.error(f"Failed to parse setup for package {package_name}") return None - # don't expect beta releases to have conda config, TODO raise something + # don't expect beta releases to have conda config, TODO raise something, as we shouldn't be calling this on betas if not parsed.is_stable_release(): return None + # TODO raise something if conda_config is missing + conda_config = parsed.get_conda_config() if conda_config and "bundle_name" in conda_config: return conda_config["bundle_name"] diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 3b1da06a064b..094a059b647d 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -20,6 +20,7 @@ get_package_data_from_pypi, build_package_index, get_package_path, + get_bundle_name, ) from conda_release_groups import ( @@ -205,7 +206,6 @@ def 
update_conda_sdk_client_yml( f"Detected {len(new_data_plane_packages)} new data plane packages to add to conda-sdk-client.yml" ) - package_to_group = get_package_to_group_mapping() parameters = conda_client_data["parameters"] # quick look up for handling grouped package releases @@ -224,15 +224,16 @@ def update_conda_sdk_client_yml( result.append(package_name) continue - # check if package belongs to a release group - group_name = get_release_group(package_name, package_to_group) - group_data = get_package_group_data(group_name) + # bundle info is based on pyproject.toml + bundle_name = get_bundle_name(package_name) - if group_data: - # package is part of a release group - logger.info(f"Package {package_name} belongs to release group {group_name}") - release_name = f"release_{group_name.replace('-', '_')}" - display_name = group_name + if bundle_name: + # package is part of a bundle + logger.info( + f"Package {package_name} belongs to release bundle {bundle_name}" + ) + release_name = f"release_{bundle_name.replace('-', '_')}" + display_name = bundle_name else: # package is released individually release_name = f"release_{package_name.replace('-', '_')}" @@ -240,6 +241,7 @@ def update_conda_sdk_client_yml( # add new release parameter if not exists if release_name not in existing_parameter_names: + logger.info(f"Adding new release parameter: {release_name}") new_parameter = { "name": release_name, "displayName": display_name, @@ -247,67 +249,56 @@ def update_conda_sdk_client_yml( "default": True, } parameters.append(new_parameter) + existing_parameter_names.append(release_name) # add to CondaArtifacts - common_root, service_name = determine_service_info(pkg, package_to_group) + common_root, service_name = determine_service_info(pkg, bundle_name) - # build checkout packages - if group_data: - checkout_packages = [] - for grouped_pkg_name in group_data["packages"]: - curr_pkg = package_dict.get(grouped_pkg_name, {}) - if not curr_pkg: - logger.error( - f"Package 
{grouped_pkg_name} listed in group {group_name} not found in CSV data, skipping" - ) - result.append(grouped_pkg_name) - continue - curr_version = curr_pkg.get(VERSION_GA_COL) - if curr_version: - checkout_packages.append( - {"package": grouped_pkg_name, "version": curr_version} - ) - else: - logger.error( - f"Package {grouped_pkg_name} in group {group_name} is missing version info, skipping" - ) - result.append(grouped_pkg_name) - else: - checkout_packages = [ - {"package": package_name, "version": pkg.get(VERSION_GA_COL)} - ] + curr_version = pkg.get(VERSION_GA_COL) + + if not curr_version: + logger.error( + f"Package {package_name} is missing version info, skipping addition" + ) + result.append(package_name) + continue + + checkout_package = {"package": package_name, "version": curr_version} - if group_name not in existing_artifact_names: + if package_name in existing_artifact_names: + # individual released package already exists + logger.warning( + f"New package {package_name} already exists in conda-sdk-client.yml, skipping addition" + ) + result.append(package_name) + continue + + if bundle_name and bundle_name in existing_artifact_names: + # bundle already exists, will append packages to it + logger.info( + f"Release bundle {bundle_name} already exists in conda-sdk-client.yml, will append package {package_name} to it" + ) + conda_artifacts[existing_artifact_names[bundle_name]]["checkout"].append( + checkout_package + ) + else: + # no existing artifact, whether bundle or not -> create new_artifact_entry = { - "name": group_name if group_data else package_name, + "name": bundle_name if bundle_name else package_name, "common_root": common_root, "service": service_name, "in_batch": f"${{{{ parameters.{release_name} }}}}", - "checkout": checkout_packages, + "checkout": [checkout_package], } - # append before azure-mgmt entry conda_artifacts.insert(len(conda_artifacts) - 1, new_artifact_entry) added_count += 1 logger.info(f"Added new data plane package: 
{package_name}") - else: - logger.info( - f"CondaArtifact for {group_name if group_data else package_name} already exists in conda-sdk-client.yml" - ) - curr_artifact_checkout = conda_artifacts[ - existing_artifact_names[group_name] - ]["checkout"] - packages_in_artifact = {item["package"] for item in curr_artifact_checkout} - - # account for adding new packages to an existing group - for pkg_entry in checkout_packages: - if pkg_entry["package"] not in packages_in_artifact: - curr_artifact_checkout.append(pkg_entry) - added_count += 1 - logger.info( - f"Added package {pkg_entry['package']} to existing CondaArtifact {group_name}" - ) + + existing_artifact_names[bundle_name if bundle_name else package_name] = ( + len(conda_artifacts) - 2 + ) # new index # === Add new mgmt plane packages === @@ -370,28 +361,24 @@ def update_conda_sdk_client_yml( def determine_service_info( - pkg: Dict[str, str], package_to_group: dict + pkg: Dict[str, str], bundle_name: Optional[str] ) -> Tuple[str, str]: """ Returns the common root and service name for the given package. :param package_name: The name of the package (e.g., "azure-ai-textanalytics"). - :param package_to_group: Mapping of package names to release group names. + :param bundle_name: The name of the bundle/release group the package belongs to, if any. 
""" # defaults package_name = pkg.get(PACKAGE_COL, "") service_name = pkg.get(REPO_PATH_COL, "").lower() - common_root = "azure" - # check for exceptions to the pattern - group_name = get_release_group(package_name, package_to_group) - group_data = get_package_group_data(group_name) + if bundle_name: + common_root = f"azure/{bundle_name.split('-')[1]}" + else: + common_root = "azure" - if group_data: - if group_data.get("service"): - service_name = group_data["service"] - if group_data.get("common_root"): - common_root = group_data["common_root"] + # TODO handle exceptions msrest,msal.msal-extensions,azure-ai-vision,azure-healthinsights if not service_name: service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) @@ -1014,6 +1001,7 @@ def update_mgmt_plane_release_log( conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names ) + exit() # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( From 5595088a32eb54830990f42da715dd9ec1f191a5 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 10:12:07 -0800 Subject: [PATCH 058/112] minor --- .../azure-sdk-tools/ci_tools/parsing/parse_functions.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 5f4f809988eb..4602a781a16c 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -881,13 +881,6 @@ def classify_release_type(version: str) -> str: :param str version: The version string to classify (e.g., "1.0.0", "2.1.0b1", "1.5.0a2") :rtype: str :return: Either "beta" or "stable" - - Examples: - "1.0.0" -> "stable" - "2.1.0b1" -> "beta" - "1.5.0a2" -> "beta" - "3.0.0rc1" -> "beta" - "1.0.0.dev20241201" -> 
"beta" """ if "b" in version.lower(): return "beta" From f52e0acc0bc3788f5efbe01a70af011562c8f222 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 10:28:05 -0800 Subject: [PATCH 059/112] clean up package sorting --- conda/update_conda_files.py | 46 +++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 25 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 094a059b647d..76280951b5db 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -208,7 +208,7 @@ def update_conda_sdk_client_yml( parameters = conda_client_data["parameters"] - # quick look up for handling grouped package releases + # quick look up for handling bundled package releases existing_parameter_names = [p.get("name") for p in parameters] existing_artifact_names = { a.get("name"): idx for idx, a in enumerate(conda_artifacts) @@ -252,8 +252,6 @@ def update_conda_sdk_client_yml( existing_parameter_names.append(release_name) # add to CondaArtifacts - common_root, service_name = determine_service_info(pkg, bundle_name) - curr_version = pkg.get(VERSION_GA_COL) if not curr_version: @@ -264,6 +262,7 @@ def update_conda_sdk_client_yml( continue checkout_package = {"package": package_name, "version": curr_version} + common_root, service_name = determine_service_info(pkg, bundle_name) if package_name in existing_artifact_names: # individual released package already exists @@ -962,20 +961,22 @@ def update_mgmt_plane_release_log( ] logger.info(f"Filtered to {len(packages)} GA packages") - outdated_packages = [ - pkg for pkg in packages if package_needs_update(pkg, old_version, is_new=False) + data_pkgs, mgmt_pkgs = separate_packages_by_type(packages) + outdated_data_pkgs = [ + pkg for pkg in data_pkgs if package_needs_update(pkg, old_version, is_new=False) + ] + new_data_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in data_pkgs + if package_needs_update(pkg, old_version, 
is_new=True) ] - new_packages = [ - pkg for pkg in packages if package_needs_update(pkg, old_version, is_new=True) + outdated_mgmt_pkgs = [ + pkg for pkg in mgmt_pkgs if package_needs_update(pkg, old_version, is_new=False) ] - new_data_plane_packages, new_mgmt_plane_packages = separate_packages_by_type( - new_packages - ) - - # need for mgmt release log update - _, all_mgmt_packages = separate_packages_by_type(packages) - all_mgmt_packages = [ - pkg.get(PACKAGE_COL, "") for pkg in all_mgmt_packages if pkg.get(PACKAGE_COL) + new_mgmt_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in mgmt_pkgs + if package_needs_update(pkg, old_version, is_new=True) ] # map package name to csv row for easy lookup @@ -983,16 +984,8 @@ def update_mgmt_plane_release_log( # Extract package names from the filtered lists outdated_package_names = [ - pkg.get(PACKAGE_COL, "") for pkg in outdated_packages if pkg.get(PACKAGE_COL) - ] - new_data_plane_names = [ pkg.get(PACKAGE_COL, "") - for pkg in new_data_plane_packages - if pkg.get(PACKAGE_COL) - ] - new_mgmt_plane_names = [ - pkg.get(PACKAGE_COL, "") - for pkg in new_mgmt_plane_packages + for pkg in (outdated_data_pkgs + outdated_mgmt_pkgs) if pkg.get(PACKAGE_COL) ] @@ -1001,12 +994,15 @@ def update_mgmt_plane_release_log( conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names ) - exit() + + # pre-process bundled packages to minimize file writes for new data plane packages, + # and release logs # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( package_dict, new_data_plane_names ) + exit() # handle new mgmt plane libraries new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) From a3a3c974d24334b82b2dc825eb2389f66eb17be3 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 10:47:43 -0800 Subject: [PATCH 060/112] start preprocessing 
bundle mapping from pyproject --- conda/conda_helper_functions.py | 22 +++++++++++++++++++++- conda/update_conda_files.py | 26 +++++++++++++++----------- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 7e61a5737510..eaedc1172d28 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -29,10 +29,13 @@ # Helpers for handling bundled releases # ===================================== -def get_package_path(package_name: str) -> str: +def get_package_path(package_name: str) -> Optional[str]: """Get the filesystem path of an SDK package given its name.""" pattern = os.path.join(SDK_DIR, "**", package_name) matches = glob.glob(pattern, recursive=True) + if not matches: + logger.error(f"Package path not found for package: {package_name}") + return None return matches[0] def get_bundle_name(package_name: str) -> Optional[str]: @@ -42,6 +45,9 @@ def get_bundle_name(package_name: str) -> Optional[str]: If bundled, return the bundle name; otherwise, return None. 
""" package_path = get_package_path(package_name) + if not package_path: + logger.warning(f"Cannot determine package path for {package_name}") + return None parsed = ParsedSetup.from_path(package_path) if not parsed: # TODO raise something @@ -60,6 +66,20 @@ def get_bundle_name(package_name: str) -> Optional[str]: return None +def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: + """Create a mapping of bundle names to their constituent package names.""" + bundle_map = {} + + for package_name in package_names: + logger.debug(f"Processing package for bundle mapping: {package_name}") + bundle_name = get_bundle_name(package_name) + if bundle_name: + if bundle_name not in bundle_map: + bundle_map[bundle_name] = [] + bundle_map[bundle_name].append(package_name) + + return bundle_map + # ===================================== # Utility functions for parsing data # ===================================== diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 76280951b5db..751944634b88 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -2,13 +2,10 @@ import os import argparse -import json -import csv import yaml -import urllib.request import re import glob -from datetime import datetime, timedelta +from datetime import datetime from dateutil.relativedelta import relativedelta from ci_tools.logging import logger, configure_logging from ci_tools.parsing import ParsedSetup, extract_package_metadata @@ -21,6 +18,7 @@ build_package_index, get_package_path, get_bundle_name, + map_bundle_to_packages ) from conda_release_groups import ( @@ -378,9 +376,9 @@ def determine_service_info( common_root = "azure" # TODO handle exceptions msrest,msal.msal-extensions,azure-ai-vision,azure-healthinsights - - if not service_name: - service_name = os.path.basename(os.path.dirname(get_package_path(package_name))) + package_path = get_package_path(package_name) + if not service_name and package_path: + service_name = 
os.path.basename(os.path.dirname(package_path)) return common_root, service_name @@ -432,11 +430,14 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] return list(host_requirements), list(run_requirements) -def get_package_metadata(package_name: str, package_path: str) -> Tuple[str, str, str]: +def get_package_metadata(package_name: str, package_path: Optional[str]) -> Tuple[str, str, str]: """Extract package metadata for about section in meta.yaml.""" pkg_metadata = extract_package_metadata(package_path) - - service_dir = os.path.basename(os.path.dirname(package_path)) + if package_path: + service_dir = os.path.basename(os.path.dirname(package_path)) + else: + # TODO + service_dir = package_name.replace("azure-", "") home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" # TODO check correctness of this @@ -495,8 +496,9 @@ def generate_data_plane_meta_yaml( host_reqs = list(host_reqs) run_reqs = list(run_reqs) + package_path = get_package_path(group_data["packages"][0]) home_url, summary, description = get_package_metadata( - group_name, get_package_path(group_data["packages"][0]) + group_name, package_path ) else: logger.info(f"Generating meta.yaml for package {package_name}") @@ -997,6 +999,8 @@ def update_mgmt_plane_release_log( # pre-process bundled packages to minimize file writes for new data plane packages, # and release logs + bundle_map = map_bundle_to_packages(list(package_dict.keys())) + print(bundle_map) # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( From 017369459fa63711444db49486a86576cd2b8f09 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 10:53:50 -0800 Subject: [PATCH 061/112] fix bundling for new data plane yaml --- conda/update_conda_files.py | 40 +++++++++++++++++-------------------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git 
a/conda/update_conda_files.py b/conda/update_conda_files.py index 751944634b88..177a8f969772 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -457,16 +457,12 @@ def get_package_metadata(package_name: str, package_path: Optional[str]) -> Tupl def generate_data_plane_meta_yaml( package_dict: Dict[str, Dict[str, str]], + bundle_map: Dict[str, List[str]], package_name: str, - group_name: Optional[str], - group_data: Optional[dict], + bundle_name: Optional[str], ) -> str: """ Generate the meta.yaml content for a data plane package or release group. - - :param package_dict: Dictionary mapping package names to their CSV row data. - :param package_name: The name of the package to generate meta.yaml for. - :param group: Whether the meta.yaml is for a single package or group. """ # TODO is it correct that the env var name is arbitrary and replaced in conda_functions.py? @@ -475,16 +471,16 @@ def generate_data_plane_meta_yaml( # TODO not sure if this is the best way to get these requirements # TODO don't think this covers all possible import tests, e.g. azure.eventgrid, azure.eventgrid.aio <- when would I add that? 
- if group_name and group_data: + if bundle_name: # handle grouped packages logger.info( - f"Generating meta.yaml for release group {group_name} including packages: {group_data['packages']}" + f"Generating meta.yaml for release group {bundle_name} including packages: {bundle_map[bundle_name]}" ) host_reqs = set() run_reqs = set() pkg_imports = [] - for pkg in group_data["packages"]: + for pkg in bundle_map[bundle_name]: package_path = get_package_path(pkg) parsed_setup = ParsedSetup.from_path(package_path) @@ -496,9 +492,9 @@ def generate_data_plane_meta_yaml( host_reqs = list(host_reqs) run_reqs = list(run_reqs) - package_path = get_package_path(group_data["packages"][0]) + package_path = get_package_path(bundle_map[bundle_name][0]) home_url, summary, description = get_package_metadata( - group_name, package_path + bundle_name, package_path ) else: logger.info(f"Generating meta.yaml for package {package_name}") @@ -559,7 +555,7 @@ def generate_data_plane_meta_yaml( def add_new_data_plane_packages( - package_dict: Dict[str, Dict[str, str]], new_data_plane_names: List[str] + package_dict: Dict[str, Dict[str, str]], bundle_map: Dict[str, List[str]], new_data_plane_names: List[str] ) -> List[str]: """Create meta.yaml files for new data plane packages and add import tests.""" if len(new_data_plane_names) == 0: @@ -568,29 +564,28 @@ def add_new_data_plane_packages( logger.info(f"Adding {len(new_data_plane_names)} new data plane packages") result = [] - # grouped packages are processed once when encountering the first package in that group - group_names_processed = set() + # bundles are processed once when encountering the first package in that group + bundles_processed = set() for package_name in new_data_plane_names: logger.info(f"Adding new data plane meta.yaml for: {package_name}") pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) - group_name = get_release_group(package_name, 
get_package_to_group_mapping()) - group_data = get_package_group_data(group_name) + bundle_name = get_bundle_name(package_name) - if group_data and group_name in group_names_processed: + if bundle_name and bundle_name in bundles_processed: logger.info( - f"Meta.yaml for group {group_name} already created, skipping {package_name}" + f"Meta.yaml for bundle {bundle_name} already created, skipping {package_name}" ) continue try: meta_yml = generate_data_plane_meta_yaml( - package_dict, package_name, group_name, group_data + package_dict, bundle_map, package_name, bundle_name ) - if group_data: - group_names_processed.add(group_name) + if bundle_name: + bundles_processed.add(bundle_name) except Exception as e: logger.error( f"Failed to generate meta.yaml content for {package_name} and skipping, error: {e}" @@ -1000,11 +995,12 @@ def update_mgmt_plane_release_log( # pre-process bundled packages to minimize file writes for new data plane packages, # and release logs bundle_map = map_bundle_to_packages(list(package_dict.keys())) + # TODO testing print(bundle_map) # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( - package_dict, new_data_plane_names + package_dict, bundle_map, new_data_plane_names ) exit() From c7d670554995a33a0ed21bc241a2da2b4499a4d4 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 15:59:06 -0800 Subject: [PATCH 062/112] add conda section to pyprojects of established bundles --- .../azure-communication-callautomation/pyproject.toml | 6 +++++- sdk/communication/azure-communication-chat/pyproject.toml | 4 ++++ sdk/communication/azure-communication-email/pyproject.toml | 4 ++++ .../azure-communication-identity/pyproject.toml | 4 ++++ .../azure-communication-jobrouter/pyproject.toml | 4 ++++ .../azure-communication-messages/pyproject.toml | 4 ++++ .../azure-communication-phonenumbers/pyproject.toml | 6 +++++- 
sdk/communication/azure-communication-rooms/pyproject.toml | 4 ++++ sdk/communication/azure-communication-sms/pyproject.toml | 4 ++++ .../azure-eventhub-checkpointstoreblob-aio/pyproject.toml | 4 ++++ .../azure-eventhub-checkpointstoreblob/pyproject.toml | 6 +++++- sdk/eventhub/azure-eventhub/pyproject.toml | 4 ++++ sdk/keyvault/azure-keyvault-administration/pyproject.toml | 4 ++++ sdk/keyvault/azure-keyvault-certificates/pyproject.toml | 4 ++++ sdk/keyvault/azure-keyvault-keys/pyproject.toml | 4 ++++ sdk/keyvault/azure-keyvault-secrets/pyproject.toml | 4 ++++ .../azure-schemaregistry-avroencoder/pyproject.toml | 4 ++++ sdk/schemaregistry/azure-schemaregistry/pyproject.toml | 4 ++++ sdk/storage/azure-storage-blob/pyproject.toml | 4 ++++ sdk/storage/azure-storage-file-datalake/pyproject.toml | 4 ++++ sdk/storage/azure-storage-file-share/pyproject.toml | 4 ++++ sdk/storage/azure-storage-queue/pyproject.toml | 4 ++++ 22 files changed, 91 insertions(+), 3 deletions(-) diff --git a/sdk/communication/azure-communication-callautomation/pyproject.toml b/sdk/communication/azure-communication-callautomation/pyproject.toml index ab509fcf3611..467396480b4f 100644 --- a/sdk/communication/azure-communication-callautomation/pyproject.toml +++ b/sdk/communication/azure-communication-callautomation/pyproject.toml @@ -1,3 +1,7 @@ [tool.azure-sdk-build] pyright = false -verifytypes = false \ No newline at end of file +verifytypes = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" \ No newline at end of file diff --git a/sdk/communication/azure-communication-chat/pyproject.toml b/sdk/communication/azure-communication-chat/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-chat/pyproject.toml +++ b/sdk/communication/azure-communication-chat/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git 
a/sdk/communication/azure-communication-email/pyproject.toml b/sdk/communication/azure-communication-email/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-email/pyproject.toml +++ b/sdk/communication/azure-communication-email/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-identity/pyproject.toml b/sdk/communication/azure-communication-identity/pyproject.toml index 9cfeff7ce263..fbc3cd0f05e1 100644 --- a/sdk/communication/azure-communication-identity/pyproject.toml +++ b/sdk/communication/azure-communication-identity/pyproject.toml @@ -1,5 +1,9 @@ [tool.azure-sdk-build] pyright = false +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" + [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" diff --git a/sdk/communication/azure-communication-jobrouter/pyproject.toml b/sdk/communication/azure-communication-jobrouter/pyproject.toml index 47f59e97f35e..9b4e3c02f286 100644 --- a/sdk/communication/azure-communication-jobrouter/pyproject.toml +++ b/sdk/communication/azure-communication-jobrouter/pyproject.toml @@ -1,3 +1,7 @@ [tool.azure-sdk-build] pyright = false sphinx = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-messages/pyproject.toml b/sdk/communication/azure-communication-messages/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-messages/pyproject.toml +++ b/sdk/communication/azure-communication-messages/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-phonenumbers/pyproject.toml 
b/sdk/communication/azure-communication-phonenumbers/pyproject.toml index f51cbbbb7407..adac01aee06d 100644 --- a/sdk/communication/azure-communication-phonenumbers/pyproject.toml +++ b/sdk/communication/azure-communication-phonenumbers/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] -pyright = false \ No newline at end of file +pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" \ No newline at end of file diff --git a/sdk/communication/azure-communication-rooms/pyproject.toml b/sdk/communication/azure-communication-rooms/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-rooms/pyproject.toml +++ b/sdk/communication/azure-communication-rooms/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/communication/azure-communication-sms/pyproject.toml b/sdk/communication/azure-communication-sms/pyproject.toml index e00361912969..0f2d30efad8f 100644 --- a/sdk/communication/azure-communication-sms/pyproject.toml +++ b/sdk/communication/azure-communication-sms/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml index 03685d81d31a..6662340bf319 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob-aio/pyproject.toml @@ -55,3 +55,7 @@ latestdependency = false mindependency = false whl_no_aio = false black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-eventhub" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml b/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml index 
4ea03c30e600..bd54471fd812 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml +++ b/sdk/eventhub/azure-eventhub-checkpointstoreblob/pyproject.toml @@ -50,4 +50,8 @@ pyright = false type_check_samples = true verifytypes = false pylint = true -black = false \ No newline at end of file +black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-eventhub" \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhub/pyproject.toml b/sdk/eventhub/azure-eventhub/pyproject.toml index 5437968d6e13..83e31b582699 100644 --- a/sdk/eventhub/azure-eventhub/pyproject.toml +++ b/sdk/eventhub/azure-eventhub/pyproject.toml @@ -46,3 +46,7 @@ pyright = false type_check_samples = true verifytypes = true pylint = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-eventhub" diff --git a/sdk/keyvault/azure-keyvault-administration/pyproject.toml b/sdk/keyvault/azure-keyvault-administration/pyproject.toml index e00361912969..f3c3c463cfe0 100644 --- a/sdk/keyvault/azure-keyvault-administration/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-administration/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" diff --git a/sdk/keyvault/azure-keyvault-certificates/pyproject.toml b/sdk/keyvault/azure-keyvault-certificates/pyproject.toml index e00361912969..f3c3c463cfe0 100644 --- a/sdk/keyvault/azure-keyvault-certificates/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-certificates/pyproject.toml @@ -1,2 +1,6 @@ [tool.azure-sdk-build] pyright = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" diff --git a/sdk/keyvault/azure-keyvault-keys/pyproject.toml b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index eec044c8763e..e7036b8d69fa 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -47,6 +47,10 @@ pytyped = ["py.typed"] 
[tool.azure-sdk-build] pyright = false +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" + [tool.uv.sources] azure-core = { path = "../../core/azure-core" } azure-keyvault-nspkg = { path = "../../nspkg/azure-keyvault-nspkg" } diff --git a/sdk/keyvault/azure-keyvault-secrets/pyproject.toml b/sdk/keyvault/azure-keyvault-secrets/pyproject.toml index 7272c6cb4690..1b1c5f5f5c9c 100644 --- a/sdk/keyvault/azure-keyvault-secrets/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-secrets/pyproject.toml @@ -1 +1,5 @@ [tool.azure-sdk-build] + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" diff --git a/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml b/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml index 78755ba24174..4b8523b0645a 100644 --- a/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml +++ b/sdk/schemaregistry/azure-schemaregistry-avroencoder/pyproject.toml @@ -3,3 +3,7 @@ mypy = true pyright = false type_check_samples = true verifytypes = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-schemaregistry" diff --git a/sdk/schemaregistry/azure-schemaregistry/pyproject.toml b/sdk/schemaregistry/azure-schemaregistry/pyproject.toml index 49f351cfcef9..031caadada00 100644 --- a/sdk/schemaregistry/azure-schemaregistry/pyproject.toml +++ b/sdk/schemaregistry/azure-schemaregistry/pyproject.toml @@ -2,3 +2,7 @@ mypy = true pyright = false type_check_samples = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-schemaregistry" diff --git a/sdk/storage/azure-storage-blob/pyproject.toml b/sdk/storage/azure-storage-blob/pyproject.toml index 34719d6d26b0..2aff96cb2231 100644 --- a/sdk/storage/azure-storage-blob/pyproject.toml +++ b/sdk/storage/azure-storage-blob/pyproject.toml @@ -5,3 +5,7 @@ type_check_samples = true verifytypes = true strict_sphinx = true black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = 
"azure-storage" diff --git a/sdk/storage/azure-storage-file-datalake/pyproject.toml b/sdk/storage/azure-storage-file-datalake/pyproject.toml index fac4a5d5db0c..9b7b935bb394 100644 --- a/sdk/storage/azure-storage-file-datalake/pyproject.toml +++ b/sdk/storage/azure-storage-file-datalake/pyproject.toml @@ -4,3 +4,7 @@ pyright = false type_check_samples = true verifytypes = false black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage" diff --git a/sdk/storage/azure-storage-file-share/pyproject.toml b/sdk/storage/azure-storage-file-share/pyproject.toml index b04c8ccc0c0e..fd91dad74097 100644 --- a/sdk/storage/azure-storage-file-share/pyproject.toml +++ b/sdk/storage/azure-storage-file-share/pyproject.toml @@ -3,3 +3,7 @@ mypy = true pyright = false type_check_samples = true black = false + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage" diff --git a/sdk/storage/azure-storage-queue/pyproject.toml b/sdk/storage/azure-storage-queue/pyproject.toml index 7ea997ba706c..6c32f6f3d4b7 100644 --- a/sdk/storage/azure-storage-queue/pyproject.toml +++ b/sdk/storage/azure-storage-queue/pyproject.toml @@ -4,3 +4,7 @@ pyright = false type_check_samples = true verifytypes = true black = true + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-storage" From 98f0acec322dd2abde27eb670c7264ab99642594 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 9 Jan 2026 16:22:06 -0800 Subject: [PATCH 063/112] minor helper improvement --- conda/conda_helper_functions.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index eaedc1172d28..d069be35bd92 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -53,14 +53,16 @@ def get_bundle_name(package_name: str) -> Optional[str]: # TODO raise something logger.error(f"Failed to parse setup for package 
{package_name}") return None + + conda_config = parsed.get_conda_config() - # don't expect beta releases to have conda config, TODO raise something, as we shouldn't be calling this on betas - if not parsed.is_stable_release(): + if not conda_config: + logger.warning(f"No conda config found for package {package_name}") + if parsed.is_stable_release(): + # TODO raise something + logger.error(f"Stable release package {package_name} needs a conda config") return None - - # TODO raise something if conda_config is missing - - conda_config = parsed.get_conda_config() + if conda_config and "bundle_name" in conda_config: return conda_config["bundle_name"] @@ -73,6 +75,7 @@ def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: for package_name in package_names: logger.debug(f"Processing package for bundle mapping: {package_name}") bundle_name = get_bundle_name(package_name) + logger.debug(f"Bundle name for package {package_name}: {bundle_name}") if bundle_name: if bundle_name not in bundle_map: bundle_map[bundle_name] = [] From a377760882cdc40882baeea9cdd38a8fc8698508 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 12 Jan 2026 14:19:28 -0800 Subject: [PATCH 064/112] clean and bundle map progress --- conda/conda_helper_functions.py | 42 +++++++++++++++++++++++++-------- conda/update_conda_files.py | 17 +++++-------- 2 files changed, 38 insertions(+), 21 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index d069be35bd92..2e5a2be74640 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -12,9 +12,9 @@ from datetime import datetime from ci_tools.parsing import ParsedSetup -# TODO move the constants into a third file ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) SDK_DIR = os.path.join(ROOT_DIR, "sdk") + AZURE_SDK_CSV_URL = 
"https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv" PACKAGE_COL = "Package" LATEST_GA_DATE_COL = "LatestGADate" @@ -24,6 +24,7 @@ SERVICE_NAME_COL = "ServiceName" REPO_PATH_COL = "RepoPath" TYPE_COL = "Type" +SUPPORT_COL = "Support" # ===================================== # Helpers for handling bundled releases @@ -57,10 +58,9 @@ def get_bundle_name(package_name: str) -> Optional[str]: conda_config = parsed.get_conda_config() if not conda_config: - logger.warning(f"No conda config found for package {package_name}") if parsed.is_stable_release(): # TODO raise something - logger.error(f"Stable release package {package_name} needs a conda config") + logger.warning(f"Stable release package {package_name} needs a conda config") return None if conda_config and "bundle_name" in conda_config: @@ -70,16 +70,38 @@ def get_bundle_name(package_name: str) -> Optional[str]: def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: """Create a mapping of bundle names to their constituent package names.""" - bundle_map = {} + logger.info("Mapping bundle names to packages...") + all_paths = glob.glob(os.path.join(SDK_DIR, "*", "*")) + # Exclude temp directories like .tox, .venv, __pycache__, etc. 
+ path_lookup = { + os.path.basename(p): p + for p in all_paths + if os.path.isdir(p) and not os.path.basename(p).startswith((".", "__")) + } + bundle_map = {} for package_name in package_names: logger.debug(f"Processing package for bundle mapping: {package_name}") - bundle_name = get_bundle_name(package_name) - logger.debug(f"Bundle name for package {package_name}: {bundle_name}") - if bundle_name: - if bundle_name not in bundle_map: - bundle_map[bundle_name] = [] - bundle_map[bundle_name].append(package_name) + package_path = path_lookup.get(package_name) + if not package_path: + logger.warning(f"Package path not found for {package_name}") + continue + + # Skip directories without pyproject.toml + if not os.path.exists(os.path.join(package_path, "pyproject.toml")): + logger.warning(f"Skipping {package_name}: no pyproject.toml found") + continue + + parsed = ParsedSetup.from_path(package_path) + if not parsed: + logger.error(f"Failed to parse setup for package {package_name}") + continue + + conda_config = parsed.get_conda_config() + if conda_config and "bundle_name" in conda_config: + bundle_name = conda_config["bundle_name"] + logger.debug(f"Bundle name for package {package_name}: {bundle_name}") + bundle_map.setdefault(bundle_name, []).append(package_name) return bundle_map diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 177a8f969772..746f285ea61c 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -18,7 +18,12 @@ build_package_index, get_package_path, get_bundle_name, - map_bundle_to_packages + map_bundle_to_packages, + PACKAGE_COL, + VERSION_GA_COL, + LATEST_GA_DATE_COL, + REPO_PATH_COL, + SUPPORT_COL, ) from conda_release_groups import ( @@ -41,16 +46,6 @@ # constants RELEASE_PERIOD_MONTHS = 3 -AZURE_SDK_CSV_URL = "https://raw.githubusercontent.com/Azure/azure-sdk/main/_data/releases/latest/python-packages.csv" -PACKAGE_COL = "Package" -LATEST_GA_DATE_COL = "LatestGADate" -VERSION_GA_COL = "VersionGA" 
-FIRST_GA_DATE_COL = "FirstGADate" -DISPLAY_NAME_COL = "DisplayName" -SERVICE_NAME_COL = "ServiceName" -REPO_PATH_COL = "RepoPath" -TYPE_COL = "Type" -SUPPORT_COL = "Support" # packages that should be shipped but are known to be missing from the csv PACKAGES_WITH_DOWNLOAD_URI = [ From d46bdd1fe02a66daf6c7966d00a112a36055dd20 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 12:06:42 -0800 Subject: [PATCH 065/112] refactor release log updates for bundles --- conda/conda_helper_functions.py | 25 +++++---- conda/update_conda_files.py | 93 ++++++++++++++++----------------- 2 files changed, 60 insertions(+), 58 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 2e5a2be74640..4dd667928310 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -30,6 +30,7 @@ # Helpers for handling bundled releases # ===================================== + def get_package_path(package_name: str) -> Optional[str]: """Get the filesystem path of an SDK package given its name.""" pattern = os.path.join(SDK_DIR, "**", package_name) @@ -39,9 +40,10 @@ def get_package_path(package_name: str) -> Optional[str]: return None return matches[0] + def get_bundle_name(package_name: str) -> Optional[str]: """ - Check bundled release config from package's pyproject.toml file. + Check bundled release config from package's pyproject.toml file given the package name. If bundled, return the bundle name; otherwise, return None. 
""" @@ -51,23 +53,26 @@ def get_bundle_name(package_name: str) -> Optional[str]: return None parsed = ParsedSetup.from_path(package_path) if not parsed: - # TODO raise something + # TODO raise something logger.error(f"Failed to parse setup for package {package_name}") return None - + conda_config = parsed.get_conda_config() if not conda_config: if parsed.is_stable_release(): # TODO raise something - logger.warning(f"Stable release package {package_name} needs a conda config") + logger.warning( + f"Stable release package {package_name} needs a conda config" + ) return None if conda_config and "bundle_name" in conda_config: return conda_config["bundle_name"] - + return None + def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: """Create a mapping of bundle names to their constituent package names.""" logger.info("Mapping bundle names to packages...") @@ -78,7 +83,7 @@ def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: for p in all_paths if os.path.isdir(p) and not os.path.basename(p).startswith((".", "__")) } - + bundle_map = {} for package_name in package_names: logger.debug(f"Processing package for bundle mapping: {package_name}") @@ -86,17 +91,17 @@ def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: if not package_path: logger.warning(f"Package path not found for {package_name}") continue - + # Skip directories without pyproject.toml if not os.path.exists(os.path.join(package_path, "pyproject.toml")): logger.warning(f"Skipping {package_name}: no pyproject.toml found") continue - + parsed = ParsedSetup.from_path(package_path) if not parsed: logger.error(f"Failed to parse setup for package {package_name}") continue - + conda_config = parsed.get_conda_config() if conda_config and "bundle_name" in conda_config: bundle_name = conda_config["bundle_name"] @@ -105,10 +110,12 @@ def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: return bundle_map + # 
===================================== # Utility functions for parsing data # ===================================== + def parse_csv() -> List[Dict[str, str]]: """Download and parse the Azure SDK Python packages CSV file.""" try: diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 746f285ea61c..83f05032a8f4 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -26,12 +26,6 @@ SUPPORT_COL, ) -from conda_release_groups import ( - get_package_group_data, - get_release_group, - get_package_to_group_mapping, -) - # paths ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) SDK_DIR = os.path.join(ROOT_DIR, "sdk") @@ -425,7 +419,9 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] return list(host_requirements), list(run_requirements) -def get_package_metadata(package_name: str, package_path: Optional[str]) -> Tuple[str, str, str]: +def get_package_metadata( + package_name: str, package_path: Optional[str] +) -> Tuple[str, str, str]: """Extract package metadata for about section in meta.yaml.""" pkg_metadata = extract_package_metadata(package_path) if package_path: @@ -488,9 +484,7 @@ def generate_data_plane_meta_yaml( run_reqs = list(run_reqs) package_path = get_package_path(bundle_map[bundle_name][0]) - home_url, summary, description = get_package_metadata( - bundle_name, package_path - ) + home_url, summary, description = get_package_metadata(bundle_name, package_path) else: logger.info(f"Generating meta.yaml for package {package_name}") package_path = get_package_path(package_name) @@ -550,7 +544,9 @@ def generate_data_plane_meta_yaml( def add_new_data_plane_packages( - package_dict: Dict[str, Dict[str, str]], bundle_map: Dict[str, List[str]], new_data_plane_names: List[str] + package_dict: Dict[str, Dict[str, str]], + bundle_map: Dict[str, List[str]], + new_data_plane_names: List[str], ) -> List[str]: """Create meta.yaml files for new data plane packages and add 
import tests.""" if len(new_data_plane_names) == 0: @@ -604,11 +600,13 @@ def add_new_data_plane_packages( # ===================================== -def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str]: +def add_new_mgmt_plane_packages( + package_dict: Dict[str, Dict[str, str]], new_mgmt_plane_names: List[str] +) -> List[str]: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" - if len(new_packages) == 0: + if len(new_mgmt_plane_names) == 0: return [] - logger.info(f"Adding {len(new_packages)} new management plane packages") + logger.info(f"Adding {len(new_mgmt_plane_names)} new management plane packages") result = [] # can't use pyyaml due to jinja2 @@ -620,7 +618,7 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] ) if not test_match: logger.error("Could not find 'test: imports:' section in meta.yaml") - result.extend([pkg.get(PACKAGE_COL) for pkg in new_packages]) + result.extend(new_mgmt_plane_names) return result existing_imports_text = test_match.group(1) @@ -631,15 +629,11 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] ] new_imports = [] - for pkg in new_packages: - package_name = pkg.get(PACKAGE_COL) + for package_name in new_mgmt_plane_names: if not package_name: logger.warning("Skipping package with missing name") continue - # TODO there are some existing packages that have hyphens instead of . which seems wrong? 
- # ^ should manually edit these before running this script coz it messes with alphabetical sort - module_name = package_name.replace("-", ".") imports = [ @@ -675,9 +669,11 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] file.write(updated_content) except Exception as e: logger.error(f"Failed to update {CONDA_MGMT_META_YAML_PATH}: {e}") - result.extend([pkg.get(PACKAGE_COL) for pkg in new_packages]) + result.extend(new_mgmt_plane_names) - logger.info(f"Added {len(new_packages)} new management plane packages to meta.yaml") + logger.info( + f"Added {len(new_mgmt_plane_names)} new management plane packages to meta.yaml" + ) return result @@ -688,6 +684,7 @@ def add_new_mgmt_plane_packages(new_packages: List[Dict[str, str]]) -> List[str] def update_data_plane_release_logs( package_dict: Dict, + bundle_map: Dict[str, List[str]], new_data_plane_names: List[str], release_date: str, ) -> List[str]: @@ -695,11 +692,8 @@ def update_data_plane_release_logs( Add and update release logs for data plane conda packages. Release log includes versions of all packages for the release """ result = [] - package_to_group = get_package_to_group_mapping() # Update all existing data plane release logs by file - # NOTE: for new packages added to an existing release group, this should handle that as well - # as long as conda_release_groups.py was updated to include the new package in the group existing_release_logs = glob.glob(os.path.join(CONDA_RELEASE_LOGS_DIR, "*.md")) for release_log_path in existing_release_logs: @@ -709,7 +703,7 @@ def update_data_plane_release_logs( continue if ( curr_service_name not in package_dict - and curr_service_name not in package_to_group.values() + and curr_service_name not in bundle_map.values() ): logger.warning( f"Existing release log service {curr_service_name} was not found in CSV data, skipping update. It may be deprecated." 
@@ -717,20 +711,18 @@ def update_data_plane_release_logs( result.append(curr_service_name) continue - group_name = get_release_group(curr_service_name, package_to_group) - group_data = get_package_group_data(group_name) - pkg_updates = [] - if group_data: - pkg_names_in_log = group_data["packages"] - for pkg_name in pkg_names_in_log: + if curr_service_name in bundle_map: + # handle grouped packages + pkg_names_in_bundle = bundle_map[curr_service_name] + for pkg_name in pkg_names_in_bundle: pkg = package_dict.get(pkg_name, {}) version = pkg.get(VERSION_GA_COL) if version: pkg_updates.append(f"- {pkg_name}-{version}") else: logger.warning( - f"Package {pkg_name} in group {group_name} is missing version info, it may be deprecated. Skipping in release log update" + f"Package {pkg_name} in group {curr_service_name} is missing version info, it may be deprecated. Skipping in release log update" ) result.append(pkg_name) else: @@ -796,21 +788,21 @@ def update_data_plane_release_logs( result.append(package_name) continue - # check for group - group_name = get_release_group(package_name, get_package_to_group_mapping()) - group_data = get_package_group_data(group_name) - if group_data: - release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{group_name}.md") + bundle_name = get_bundle_name(package_name) + # check for bundle + if bundle_name: + release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{bundle_name}.md") else: release_log_path = os.path.join( CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" ) + bundle_name = package_name # for release log logic below if not os.path.exists(release_log_path): # Add brand new release log file - logger.info(f"Creating new release log for: {group_name}") + logger.info(f"Creating new release log for: {bundle_name}") - title_parts = group_name.replace("azure-", "").split("-") + title_parts = bundle_name.replace("azure-", "").split("-") title = " ".join(word.title() for word in title_parts) content = f"# Azure {title} client library 
for Python (conda)\n\n" @@ -818,8 +810,8 @@ def update_data_plane_release_logs( content += "### Packages included\n\n" pkg_updates = [] - if group_data: - pkg_names_in_log = group_data["packages"] + if bundle_name: + pkg_names_in_log = bundle_map.get(bundle_name, []) for pkg_name in pkg_names_in_log: pkg = package_dict.get(pkg_name, {}) version = pkg.get(VERSION_GA_COL) @@ -833,14 +825,14 @@ def update_data_plane_release_logs( try: with open(release_log_path, "w") as f: f.write(content) - logger.info(f"Created new release log for {group_name}") + logger.info(f"Created new release log for {bundle_name}") except Exception as e: - logger.error(f"Failed to create release log for {group_name}: {e}") - result.append(group_name) + logger.error(f"Failed to create release log for {bundle_name}: {e}") + result.append(bundle_name) else: logger.info( - f"Release log for {group_name} already exists, check that new package {package_name} is included" + f"Release log for {bundle_name} already exists, check that new package {package_name} is included" ) return result @@ -997,16 +989,19 @@ def update_mgmt_plane_release_log( new_data_plane_results = add_new_data_plane_packages( package_dict, bundle_map, new_data_plane_names ) - exit() # handle new mgmt plane libraries - new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_packages) + new_mgmt_plane_results = add_new_mgmt_plane_packages( + package_dict, new_mgmt_plane_names + ) # add/update release logs data_plane_release_log_results = update_data_plane_release_logs( - package_dict, new_data_plane_names, new_version + package_dict, bundle_map, new_data_plane_names, new_version ) + exit() + mgmt_plane_release_log_results = update_mgmt_plane_release_log( package_dict, all_mgmt_packages, new_version ) From a2715c23b31826c273f385f0f0bcb2d4d0dc7601 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 13:40:51 -0800 Subject: [PATCH 066/112] mgmt release log --- 
conda/update_conda_files.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 83f05032a8f4..e47d139f8400 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -967,11 +967,13 @@ def update_mgmt_plane_release_log( package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} # Extract package names from the filtered lists - outdated_package_names = [ - pkg.get(PACKAGE_COL, "") - for pkg in (outdated_data_pkgs + outdated_mgmt_pkgs) - if pkg.get(PACKAGE_COL) + outdated_data_pkg_names = [ + pkg.get(PACKAGE_COL, "") for pkg in outdated_data_pkgs if pkg.get(PACKAGE_COL) + ] + outdated_mgmt_pkg_names = [ + pkg.get(PACKAGE_COL, "") for pkg in outdated_mgmt_pkgs if pkg.get(PACKAGE_COL) ] + outdated_package_names = outdated_data_pkg_names + outdated_mgmt_pkg_names # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... 
@@ -1000,10 +1002,8 @@ def update_mgmt_plane_release_log( package_dict, bundle_map, new_data_plane_names, new_version ) - exit() - mgmt_plane_release_log_results = update_mgmt_plane_release_log( - package_dict, all_mgmt_packages, new_version + package_dict, outdated_mgmt_pkg_names + new_mgmt_plane_names, new_version ) # TODO AKA link logic From da7a22f9da720b59f9160df189092f8d51779efd Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 13:51:40 -0800 Subject: [PATCH 067/112] clean --- conda/update_conda_files.py | 38 ++++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index e47d139f8400..22674f22f9cf 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -932,8 +932,6 @@ def update_mgmt_plane_release_log( logger.error("No packages found in CSV data.") exit(1) - # TODO clean this part up - # Only ship GA packages that are not deprecated packages = [ pkg @@ -946,16 +944,20 @@ def update_mgmt_plane_release_log( logger.info(f"Filtered to {len(packages)} GA packages") data_pkgs, mgmt_pkgs = separate_packages_by_type(packages) - outdated_data_pkgs = [ - pkg for pkg in data_pkgs if package_needs_update(pkg, old_version, is_new=False) + outdated_data_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in data_pkgs + if package_needs_update(pkg, old_version, is_new=False) ] new_data_plane_names = [ pkg.get(PACKAGE_COL, "") for pkg in data_pkgs if package_needs_update(pkg, old_version, is_new=True) ] - outdated_mgmt_pkgs = [ - pkg for pkg in mgmt_pkgs if package_needs_update(pkg, old_version, is_new=False) + outdated_mgmt_plane_names = [ + pkg.get(PACKAGE_COL, "") + for pkg in mgmt_pkgs + if package_needs_update(pkg, old_version, is_new=False) ] new_mgmt_plane_names = [ pkg.get(PACKAGE_COL, "") @@ -963,17 +965,18 @@ def update_mgmt_plane_release_log( if package_needs_update(pkg, old_version, 
is_new=True) ] + # don't overlap new packages with outdated packages + outdated_data_plane_names = [ + name for name in outdated_data_plane_names if name not in new_data_plane_names + ] + outdated_mgmt_plane_names = [ + name for name in outdated_mgmt_plane_names if name not in new_mgmt_plane_names + ] + # map package name to csv row for easy lookup package_dict = {pkg.get(PACKAGE_COL, ""): pkg for pkg in packages} - # Extract package names from the filtered lists - outdated_data_pkg_names = [ - pkg.get(PACKAGE_COL, "") for pkg in outdated_data_pkgs if pkg.get(PACKAGE_COL) - ] - outdated_mgmt_pkg_names = [ - pkg.get(PACKAGE_COL, "") for pkg in outdated_mgmt_pkgs if pkg.get(PACKAGE_COL) - ] - outdated_package_names = outdated_data_pkg_names + outdated_mgmt_pkg_names + outdated_package_names = outdated_data_plane_names + outdated_mgmt_plane_names # update conda-sdk-client.yml # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... @@ -984,8 +987,9 @@ def update_mgmt_plane_release_log( # pre-process bundled packages to minimize file writes for new data plane packages, # and release logs bundle_map = map_bundle_to_packages(list(package_dict.keys())) - # TODO testing - print(bundle_map) + logger.info( + f"Identified {len(bundle_map)} release bundles from package data: {bundle_map}" + ) # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( @@ -1003,7 +1007,7 @@ def update_mgmt_plane_release_log( ) mgmt_plane_release_log_results = update_mgmt_plane_release_log( - package_dict, outdated_mgmt_pkg_names + new_mgmt_plane_names, new_version + package_dict, outdated_mgmt_plane_names + new_mgmt_plane_names, new_version ) # TODO AKA link logic From 3fe98b6784eb6e3606fc764cac33df5ffe636300 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 14:15:23 -0800 Subject: [PATCH 068/112] clean --- conda/conda_release_groups.py | 127 ------------------ 
conda/update_conda_files.py | 2 +- .../ci_tools/parsing/parse_functions.py | 2 + 3 files changed, 3 insertions(+), 128 deletions(-) delete mode 100644 conda/conda_release_groups.py diff --git a/conda/conda_release_groups.py b/conda/conda_release_groups.py deleted file mode 100644 index 31a1dff5bc75..000000000000 --- a/conda/conda_release_groups.py +++ /dev/null @@ -1,127 +0,0 @@ -""" -Mapping of Azure SDK package names to their release group, used for Conda -release file updates in update_conda_files.py - -New grouped packages should be registered before using the script -to update for Conda releases. - -Packages that are not listed here are treated as standalone packages, -each forming their own release group (excluding mgmt packages, which will -by default be grouped). - -Packages that are grouped together will: - 1. Share a single release log file (e.g., azure-communication.md for all communication packages) - 2. Be listed under one CondaArtifact entry in conda-sdk-client.yml - 3. Be released together under a single release parameter -""" - -RELEASE_GROUPS = { - # Core - "azure-core": { - "packages": ["azure-core", "azure-mgmt-core"], - "common_root": "azure", - "service": "core", - }, - # Communication - "azure-communication": { - "packages": [ - "azure-communication-chat", - "azure-communication-email", - "azure-communication-identity", - "azure-communication-phonenumbers", - "azure-communication-sms", - "azure-communication-callautomation", - "azure-communication-rooms", - "azure-communication-jobrouter", - "azure-communication-messages", - ], - "common_root": "azure/communication", - "service": "communication", - }, - # Storage - "azure-storage": { - "packages": [ - "azure-storage-blob", - "azure-storage-queue", - "azure-storage-file-share", - "azure-storage-file-datalake", - ], - "common_root": "azure/storage", - "service": "storage", - }, - # Schema Registry - "azure-schemaregistry": { - "packages": [ - "azure-schemaregistry", - 
"azure-schemaregistry-avroencoder", - ], - "common_root": "azure/schemaregistry", - "service": "schemaregistry", - }, - # Event Hub - "azure-eventhub": { - "packages": [ - "azure-eventhub", - "azure-eventhub-checkpointstoreblob", - "azure-eventhub-checkpointstoreblob-aio", - ], - "common_root": "azure/eventhub", - "service": "eventhub", - }, - "azure-keyvault": { - "packages": [ - "azure-keyvault-administration", - "azure-keyvault-secrets", - "azure-keyvault-keys", - "azure-keyvault-certificates", - ], - "common_root": "azure/keyvault", - "service": "keyvault", - }, - # Packages with other pattern exceptions, e.g. different common root - # or service vs package name mismatch - "msrest": {"packages": ["msrest"], "common_root": None}, - "msal": {"packages": ["msal"], "common_root": None}, - "msal-extensions": { - "packages": ["msal-extensions"], - "common_root": "msal", - }, - "azure-ai-vision": { - "packages": ["azure-ai-vision-imageanalysis"], - "common_root": "azure/vision", - }, - "azure-healthinsights": { - "packages": ["azure-healthinsights-radiologyinsights"], - "common_root": "azure", - "service": "healthinsights", - }, -} - - -# Reverse mapping: package name -> release group name -def get_package_to_group_mapping(): - mapping = {} - for group_name, group_info in RELEASE_GROUPS.items(): - for package in group_info["packages"]: - mapping[package] = group_name - return mapping - - -def get_release_group(package_name: str, package_to_group: dict) -> str: - """ - Get the release group name for a given package. - - :param package_name: The package name (e.g., "azure-core", "azure-communication-chat") - :return: The release group name (e.g., "azure-core", "azure-communication"), or package name itself if not grouped - """ - return package_to_group.get(package_name, package_name) - - -def get_package_group_data(group_name: str) -> dict: - """ - Get all packages that belong to a release group. 
- - :param group_name: The release group name - :return: The group data dictionary, or empty dict if not found - """ - return RELEASE_GROUPS.get(group_name, {}) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 22674f22f9cf..6d7e2229a28d 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -703,7 +703,7 @@ def update_data_plane_release_logs( continue if ( curr_service_name not in package_dict - and curr_service_name not in bundle_map.values() + and curr_service_name not in bundle_map ): logger.warning( f"Existing release log service {curr_service_name} was not found in CSV data, skipping update. It may be deprecated." diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 4602a781a16c..0d41764f1d36 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -465,6 +465,7 @@ def get_build_config(package_path: str) -> Optional[Dict[str, Any]]: except: return {} + def get_conda_config(package_path: str) -> Optional[Dict[str, Any]]: """ Attempts to retrieve all values within [tools.azure-sdk-conda] section of a pyproject.toml. @@ -485,6 +486,7 @@ def get_conda_config(package_path: str) -> Optional[Dict[str, Any]]: except: return {} + def get_ci_config(package_path: str) -> Optional[Dict[str, Any]]: """ Attempts to retrieve the parsed toml content of a CI.yml associated with this package. 
From bf01e248d0cc14fc0c4f642781eddcec0f0cd3b1 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 14:31:25 -0800 Subject: [PATCH 069/112] handle msal release log --- conda/update_conda_files.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 6d7e2229a28d..887636020054 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -41,11 +41,12 @@ # constants RELEASE_PERIOD_MONTHS = 3 -# packages that should be shipped but are known to be missing from the csv -PACKAGES_WITH_DOWNLOAD_URI = [ - "msal", - "msal-extensions", -] +# packages that should be shipped but are known to be missing from the csv - store version here +PACKAGES_WITH_DOWNLOAD_URI = { + "msal": "", + "msal-extensions": "", +} + # ===================================== # Helpers for updating conda_env.yml @@ -170,6 +171,9 @@ def update_conda_sdk_client_yml( result.append(pkg_name) continue + # store retrieved version for release log + PACKAGES_WITH_DOWNLOAD_URI[pkg_name] = latest_version + if curr_download_uri != download_uri: # version needs update logger.info( @@ -704,6 +708,7 @@ def update_data_plane_release_logs( if ( curr_service_name not in package_dict and curr_service_name not in bundle_map + and curr_service_name not in PACKAGES_WITH_DOWNLOAD_URI ): logger.warning( f"Existing release log service {curr_service_name} was not found in CSV data, skipping update. It may be deprecated." 
@@ -726,8 +731,13 @@ def update_data_plane_release_logs( ) result.append(pkg_name) else: - pkg = package_dict.get(curr_service_name, {}) - version = pkg.get(VERSION_GA_COL) + # handle exception for packages with download_uri + if curr_service_name in PACKAGES_WITH_DOWNLOAD_URI: + version = PACKAGES_WITH_DOWNLOAD_URI[curr_service_name] + else: + pkg = package_dict.get(curr_service_name, {}) + version = pkg.get(VERSION_GA_COL) + if version: pkg_updates.append(f"- {curr_service_name}-{version}") else: From d413e402b8390987123892dfea6b9453323820a9 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 14:37:26 -0800 Subject: [PATCH 070/112] minor --- conda/update_conda_files.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 887636020054..9b23a72130a7 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -734,6 +734,12 @@ def update_data_plane_release_logs( # handle exception for packages with download_uri if curr_service_name in PACKAGES_WITH_DOWNLOAD_URI: version = PACKAGES_WITH_DOWNLOAD_URI[curr_service_name] + if not version: + logger.warning( + f"Package {curr_service_name} with download_uri is missing version info, it may be deprecated. 
Skipping in release log update" + ) + result.append(curr_service_name) + continue else: pkg = package_dict.get(curr_service_name, {}) version = pkg.get(VERSION_GA_COL) From b2c90e03571d41ce82f9107137ad6f6dce42180e Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 13 Jan 2026 14:52:49 -0800 Subject: [PATCH 071/112] bug fix --- conda/update_conda_files.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 9b23a72130a7..eb7dd5a2c437 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -960,6 +960,7 @@ def update_mgmt_plane_release_log( logger.info(f"Filtered to {len(packages)} GA packages") data_pkgs, mgmt_pkgs = separate_packages_by_type(packages) + outdated_data_plane_names = [ pkg.get(PACKAGE_COL, "") for pkg in data_pkgs @@ -1022,8 +1023,10 @@ def update_mgmt_plane_release_log( package_dict, bundle_map, new_data_plane_names, new_version ) + all_mgmt_plane_names = [pkg.get(PACKAGE_COL, "") for pkg in mgmt_pkgs] + mgmt_plane_release_log_results = update_mgmt_plane_release_log( - package_dict, outdated_mgmt_plane_names + new_mgmt_plane_names, new_version + package_dict, all_mgmt_plane_names, new_version ) # TODO AKA link logic From d34e324450a5071b337bf3d95e9b469c0bf4acf4 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 14 Jan 2026 16:20:13 -0800 Subject: [PATCH 072/112] remove using metadata as summary, doesn't always work --- conda/update_conda_files.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index eb7dd5a2c437..8f3ce0a0e935 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -8,7 +8,7 @@ from datetime import datetime from dateutil.relativedelta import relativedelta from ci_tools.logging import logger, configure_logging -from ci_tools.parsing import 
ParsedSetup, extract_package_metadata +from ci_tools.parsing import ParsedSetup from typing import Dict, List, Optional, Tuple from conda_helper_functions import ( parse_csv, @@ -427,7 +427,6 @@ def get_package_metadata( package_name: str, package_path: Optional[str] ) -> Tuple[str, str, str]: """Extract package metadata for about section in meta.yaml.""" - pkg_metadata = extract_package_metadata(package_path) if package_path: service_dir = os.path.basename(os.path.dirname(package_path)) else: @@ -435,11 +434,7 @@ def get_package_metadata( service_dir = package_name.replace("azure-", "") home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" - # TODO check correctness of this - if pkg_metadata and pkg_metadata.get("description"): - summary = pkg_metadata["description"] - else: - summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" + summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" # TODO definitely need to check if this is actually always correct conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" From 72d829374f51418be4ef22a18b99c9714699c07a Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 15 Jan 2026 11:07:14 -0800 Subject: [PATCH 073/112] minor clean --- conda/update_conda_files.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 8f3ce0a0e935..77f18a5837b8 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -322,10 +322,6 @@ def update_conda_sdk_client_yml( # sort mgmt packages alphabetically azure_mgmt_artifact_checkout.sort(key=lambda x: x["package"]) - # TODO note this dump doesn't preserve some quotes like around - # displayName: 'azure-developer-loadtesting' but i don't think those functionally 
necessary? - # double check that this is ok, esp for URLs... ^ - if updated_count > 0 or added_count > 0: with open(CONDA_CLIENT_YAML_PATH, "w") as file: yaml.dump( @@ -381,9 +377,12 @@ def format_requirement(req: str) -> str: name_unpinned = re.split(r"[>=={{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}" + + # filter out ~ for yml format + req = req.replace("~", "") return req @@ -620,6 +619,7 @@ def add_new_mgmt_plane_packages( result.extend(new_mgmt_plane_names) return result + # TODO probably need to automate removal of deprecated packages from here existing_imports_text = test_match.group(1) existing_imports = [ line.strip() @@ -991,7 +991,6 @@ def update_mgmt_plane_release_log( outdated_package_names = outdated_data_plane_names + outdated_mgmt_plane_names # update conda-sdk-client.yml - # TODO handle packages missing from conda-sdk-client that aren't new relative to the last release... conda_sdk_client_pkgs_result = update_conda_sdk_client_yml( package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names ) From 7db3d2002905077c252c79958c7f5755c4847d36 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 21 Jan 2026 16:01:29 -0800 Subject: [PATCH 074/112] initial pipeline attempt --- eng/pipelines/conda-update-pipeline.yml | 52 +++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 eng/pipelines/conda-update-pipeline.yml diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml new file mode 100644 index 000000000000..5d74cf537050 --- /dev/null +++ b/eng/pipelines/conda-update-pipeline.yml @@ -0,0 +1,52 @@ +extends: + template: /eng/pipelines/templates/stages/1es-redirect.yml + parameters: + stages: + - stage: UpdateCondaFiles + displayName: Update Conda Files + + jobs: + - job: UpdateCondaFilesJob + timeoutInMinutes: 90 + displayName: Update Conda Files and Submit PR + variables: + - template: 
/eng/pipelines/templates/variables/globals.yml + + pool: + name: azsdk-pool + image: ubuntu-24.04 + os: linux + + steps: + - checkout: self + persistCredentials: true + + - task: UsePythonVersion@0 + displayName: 'Use Python 3.11' + inputs: + versionSpec: '3.11' + + - script: | + python -m pip install --upgrade pip + python -m pip install "eng/tools/azure-sdk-tools[build]" + displayName: 'Prep Environment' + + - script: | + python conda/update_conda_files.py + displayName: 'Update Conda Files' + + - template: /eng/common/pipelines/templates/steps/create-pull-request.yml + parameters: + PRBranchName: conda-update-$(Build.BuildId) + CommitMsg: 'Update conda files' + PRTitle: 'Conda Release Update generated from $(Build.BuildId)' + PRBody: | + This PR was automatically generated to update conda files. + + - Updates package versions in conda-sdk-client.yml + - Adds new packages if detected + - Updates yamls and changelogs + + Build: $(Build.BuildId) + BaseBranchName: main + displayName: 'Create Pull Request' \ No newline at end of file From ca0426f9ad4aa644a5489c29cec1883810cbdfb5 Mon Sep 17 00:00:00 2001 From: jenny <63012604+JennyPng@users.noreply.github.com> Date: Wed, 21 Jan 2026 16:04:52 -0800 Subject: [PATCH 075/112] Update conda-update-pipeline.yml for Azure Pipelines --- eng/pipelines/conda-update-pipeline.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 5d74cf537050..c9bf4645c925 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -49,4 +49,3 @@ extends: Build: $(Build.BuildId) BaseBranchName: main - displayName: 'Create Pull Request' \ No newline at end of file From 022fca4b9e9a2cafd4c09dace7e7a6f6b03528a1 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 21 Jan 2026 16:11:42 -0800 Subject: [PATCH 076/112] minor --- eng/pipelines/conda-update-pipeline.yml | 1 + 1 file 
changed, 1 insertion(+) diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index c9bf4645c925..3be675e868f5 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -29,6 +29,7 @@ extends: - script: | python -m pip install --upgrade pip python -m pip install "eng/tools/azure-sdk-tools[build]" + python -m pip install dateutils displayName: 'Prep Environment' - script: | From c35718b10d06bac5916da30f73607bfcb84c60ff Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 21 Jan 2026 16:41:11 -0800 Subject: [PATCH 077/112] variable group? --- eng/pipelines/conda-update-pipeline.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 3be675e868f5..de2c50da8bfa 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -1,3 +1,5 @@ +trigger: none + extends: template: /eng/pipelines/templates/stages/1es-redirect.yml parameters: @@ -11,6 +13,7 @@ extends: displayName: Update Conda Files and Submit PR variables: - template: /eng/pipelines/templates/variables/globals.yml + - group: Release Secrets for GitHub pool: name: azsdk-pool From b6920cecfe708f85db407c89965bd060dff9f590 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 21 Jan 2026 16:52:40 -0800 Subject: [PATCH 078/112] nvm --- eng/pipelines/conda-update-pipeline.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index de2c50da8bfa..cb07a81754de 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -13,7 +13,6 @@ extends: displayName: Update Conda Files and Submit PR variables: - template: /eng/pipelines/templates/variables/globals.yml - - group: Release Secrets for GitHub pool: name: 
azsdk-pool From f63e243f2c7a282d1d9a67a7a7dae7b3e180b9a8 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 21 Jan 2026 17:37:24 -0800 Subject: [PATCH 079/112] minor --- eng/pipelines/conda-update-pipeline.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index cb07a81754de..42357d85058c 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -1,4 +1,5 @@ -trigger: none +trigger: none +pr: none extends: template: /eng/pipelines/templates/stages/1es-redirect.yml @@ -44,11 +45,9 @@ extends: CommitMsg: 'Update conda files' PRTitle: 'Conda Release Update generated from $(Build.BuildId)' PRBody: | - This PR was automatically generated to update conda files. + This PR was automatically generated to update conda files for a new release. - - Updates package versions in conda-sdk-client.yml - - Adds new packages if detected - - Updates yamls and changelogs - - Build: $(Build.BuildId) + - Updates outdated package versions in conda-sdk-client.yml + - Adds new packages to conda-sdk-client.yml + - Adds/updates yamls and changelogs BaseBranchName: main From 20e3274b1b7ab7926e07243ca0403149ef254da4 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 22 Jan 2026 10:02:27 -0800 Subject: [PATCH 080/112] add schedule to pipeline --- conda/update_conda_files.py | 47 +++++++++++++++++-- eng/pipelines/conda-update-pipeline.yml | 61 ++++++++++++++++++++++++- 2 files changed, 102 insertions(+), 6 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 77f18a5837b8..438d0e167c95 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -62,8 +62,15 @@ def quoted_presenter(dumper, data): return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="'") -def update_conda_version() -> 
Tuple[datetime, str]: - """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions.""" +def update_conda_version( + target_release_date: Optional[datetime] = None, +) -> Tuple[datetime, str]: + """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions. + + Args: + target_release_date: Optional specific release date to use. If None, calculates + the next release date by adding RELEASE_PERIOD_MONTHS to the old version. + """ with open(CONDA_ENV_PATH, "r") as file: conda_env_data = yaml.safe_load(file) @@ -71,7 +78,10 @@ def update_conda_version() -> Tuple[datetime, str]: old_version = conda_env_data["variables"]["AZURESDK_CONDA_VERSION"] old_date = datetime.strptime(old_version, "%Y.%m.%d") - new_date = old_date + relativedelta(months=RELEASE_PERIOD_MONTHS) + if target_release_date: + new_date = target_release_date + else: + new_date = old_date + relativedelta(months=RELEASE_PERIOD_MONTHS) # bump version new_version = new_date.strftime("%Y.%m.%d") @@ -929,11 +939,40 @@ def update_mgmt_plane_release_log( action="store_true", help="Enable debug logging", ) + parser.add_argument( + "--release-date", + type=str, + default=None, + help="Release date in 'MM.DD' format (e.g., '03.01', '12.01'). " + "Year is determined automatically. 
If not provided, the next release date is calculated.", + ) args = parser.parse_args() configure_logging(args) - old_date, new_version = update_conda_version() + # Handle release date + if args.release_date: + try: + current_year = datetime.now().year + target_release_date = datetime.strptime( + f"{current_year}.{args.release_date}", "%Y.%m.%d" + ) + logger.info( + f"Using provided release date: {target_release_date.strftime('%Y.%m.%d')}" + ) + except ValueError as e: + logger.error(f"Invalid release date format '{args.release_date}': {e}") + logger.error("Expected format: 'MM.DD' (e.g., '03.01', '12.01')") + exit(1) + else: + logger.info( + "No release date provided, auto bumping old date by {} months.".format( + RELEASE_PERIOD_MONTHS + ) + ) + target_release_date = None + + old_date, new_version = update_conda_version(target_release_date) # convert to mm/dd/yyyy format for comparison with CSV dates old_version = old_date.strftime("%m/%d/%Y") diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 42357d85058c..9703e0f8585f 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -1,6 +1,45 @@ trigger: none pr: none +parameters: + - name: releaseDate + displayName: Release Date (MM.DD) + type: string + default: 'auto' + values: + - auto + - 03.01 + - 06.01 + - 09.01 + - 12.01 + +# Scheduled to run a week before each quarterly release +schedules: + - cron: "0 0 24 11 *" + displayName: Pre-December Quarterly Release + branches: + include: + - main + always: true + - cron: "0 0 22 2 *" + displayName: Pre-March Quarterly Release + branches: + include: + - main + always: true + - cron: "0 0 25 5 *" + displayName: Pre-June Quarterly Release + branches: + include: + - main + always: true + - cron: "0 0 25 8 *" + displayName: Pre-September Quarterly Release + branches: + include: + - main + always: true + extends: template: /eng/pipelines/templates/stages/1es-redirect.yml parameters: 
@@ -14,6 +53,20 @@ extends: displayName: Update Conda Files and Submit PR variables: - template: /eng/pipelines/templates/variables/globals.yml + - name: ReleaseDate + ${{ if eq(parameters.releaseDate, 'auto') }}: + ${{ if contains(variables['Build.CronSchedule.DisplayName'], 'December') }}: + value: '12.01' + ${{ elseif contains(variables['Build.CronSchedule.DisplayName'], 'March') }}: + value: '03.01' + ${{ elseif contains(variables['Build.CronSchedule.DisplayName'], 'June') }}: + value: '06.01' + ${{ elseif contains(variables['Build.CronSchedule.DisplayName'], 'September') }}: + value: '09.01' + ${{ else }}: + value: 'Unknown' + ${{ else }}: + value: ${{ parameters.releaseDate }} pool: name: azsdk-pool @@ -36,7 +89,11 @@ extends: displayName: 'Prep Environment' - script: | - python conda/update_conda_files.py + if [ "$(ReleaseDate)" != "Unknown" ]; then + python conda/update_conda_files.py --release-date "$(ReleaseDate)" + else + python conda/update_conda_files.py + fi displayName: 'Update Conda Files' - template: /eng/common/pipelines/templates/steps/create-pull-request.yml @@ -45,7 +102,7 @@ extends: CommitMsg: 'Update conda files' PRTitle: 'Conda Release Update generated from $(Build.BuildId)' PRBody: | - This PR was automatically generated to update conda files for a new release. + This PR was automatically generated to update Conda files for a new release. 
- Updates outdated package versions in conda-sdk-client.yml - Adds new packages to conda-sdk-client.yml From 412392fc6fa3c13a8e3054048c5e0c2e0d131086 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 22 Jan 2026 10:23:35 -0800 Subject: [PATCH 081/112] add date to pr name --- conda/update_conda_files.py | 3 +++ eng/pipelines/conda-update-pipeline.yml | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 438d0e167c95..180ba0929419 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -974,6 +974,9 @@ def update_mgmt_plane_release_log( old_date, new_version = update_conda_version(target_release_date) + # Output version as Azure DevOps pipeline variable + print(f"##vso[task.setvariable variable=CondaReleaseVersion]{new_version}") + # convert to mm/dd/yyyy format for comparison with CSV dates old_version = old_date.strftime("%m/%d/%Y") diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 9703e0f8585f..4c5b1b1176c0 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -99,8 +99,8 @@ extends: - template: /eng/common/pipelines/templates/steps/create-pull-request.yml parameters: PRBranchName: conda-update-$(Build.BuildId) - CommitMsg: 'Update conda files' - PRTitle: 'Conda Release Update generated from $(Build.BuildId)' + CommitMsg: 'Update conda files for $(CondaReleaseVersion) release' + PRTitle: 'Conda Release $(CondaReleaseVersion) generated by $(Build.BuildId)' PRBody: | This PR was automatically generated to update Conda files for a new release. 
From 9e9e05c5fcff3a8574126e74068fa82d4b095059 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 27 Jan 2026 10:05:29 -0800 Subject: [PATCH 082/112] Minor clean --- conda/conda-recipes/azure-mgmt/meta.yaml | 5 ----- eng/pipelines/conda-update-pipeline.yml | 2 +- .../azure-sdk-tools/ci_tools/parsing/parse_functions.py | 2 +- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/conda/conda-recipes/azure-mgmt/meta.yaml b/conda/conda-recipes/azure-mgmt/meta.yaml index d5f4cbdf7057..191506a42734 100644 --- a/conda/conda-recipes/azure-mgmt/meta.yaml +++ b/conda/conda-recipes/azure-mgmt/meta.yaml @@ -260,11 +260,6 @@ test: - azure.mgmt.dashboard.operations - azure.mgmt.databox - azure.mgmt.databox.aio - - azure.mgmt.datab - - azure.mgmt.datab.aio - - azure.mgmt.datab.aio.operations - - azure.mgmt.datab.models - - azure.mgmt.datab.operations - azure.mgmt.databoxedge - azure.mgmt.databoxedge.aio - azure.mgmt.databoxedge.aio diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 4c5b1b1176c0..f6341321ec88 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -85,7 +85,7 @@ extends: - script: | python -m pip install --upgrade pip python -m pip install "eng/tools/azure-sdk-tools[build]" - python -m pip install dateutils + python -m pip install python-dateutil displayName: 'Prep Environment' - script: | diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 0d41764f1d36..31a3d8d323d4 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -395,7 +395,7 @@ def __str__(self): def update_build_config(package_path: str, new_build_config: Dict[str, Any]) -> Dict[str, Any]: """ - Attempts to update a pyproject.toml's [tools.azure-sdk-tools] section with a new 
check configuration. + Attempts to update a pyproject.toml's [tool.azure-sdk-tools] section with a new check configuration. This function can only append or override existing check values. It cannot delete them. """ From b52a69e1be1dd968379badcf307d09d7347186d5 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 27 Jan 2026 14:00:20 -0800 Subject: [PATCH 083/112] initial draft of publish stage --- .../templates/stages/conda-sdk-client.yml | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/eng/pipelines/templates/stages/conda-sdk-client.yml b/eng/pipelines/templates/stages/conda-sdk-client.yml index 98b6811d2600..7b14158da625 100644 --- a/eng/pipelines/templates/stages/conda-sdk-client.yml +++ b/eng/pipelines/templates/stages/conda-sdk-client.yml @@ -1,4 +1,8 @@ parameters: + - name: conda_username + displayName: 'Conda Username' + type: string + default: 'azure-sdk' - name: release_msal displayName: 'msal' type: boolean @@ -906,4 +910,86 @@ extends: - package: azure-mgmt-workloadssapvirtualinstance version: 1.0.0 + - stage: Publish_To_Conda + displayName: Publish to Conda + dependsOn: Build_Universal_Dependencies + condition: and(succeeded(), ne(variables['SetDevVersion'], 'true'), ne(variables['Skip.Release'], 'true')) + variables: + - template: /eng/pipelines/templates/variables/globals.yml@self + + jobs: + - deployment: PublishCondaPackages + displayName: 'Publish Conda Packages' + environment: package-publish + timeoutInMinutes: 120 + + pool: + name: azsdk-pool + image: ubuntu-24.04 + os: linux + + strategy: + runOnce: + deploy: + steps: + - download: current + artifact: conda + displayName: 'Download Conda Artifacts' + + - task: UsePythonVersion@0 + displayName: 'Use Python 3.11' + inputs: + versionSpec: '3.11' + + - pwsh: | + python -m pip install anaconda-client + displayName: 'Install anaconda-client' + + # TODO get token from keyvault? 
+ + - pwsh: | + $packageDir = "$(Pipeline.Workspace)/conda" + $packages = Get-ChildItem -Path $packageDir -Recurse -Include "*.tar.bz2", "*.conda" + + if ($packages.Count -eq 0) { + Write-Error "No conda packages found in $packageDir" + exit 1 + } + + Write-Host "Found $($packages.Count) packages to upload:" + foreach ($pkg in $packages) { + Write-Host " - $($pkg.Name)" + } + + $failed = @() + foreach ($pkg in $packages) { + Write-Host "Uploading $($pkg.FullName)..." + $retries = 3 + $success = $false + + for ($i = 1; $i -le $retries; $i++) { + anaconda upload --user ${{ parameters.conda_username }} --skip-existing $($pkg.FullName) + if ($LASTEXITCODE -eq 0) { + Write-Host "Successfully uploaded $($pkg.Name) on attempt $i." + $success = $true + break + } + Write-Warning "Attempt $i failed, retrying..." + Start-Sleep -Seconds 5 + } + + if (-not $success) { + $failed += $pkg.Name + } + } + + if ($failed.Count -gt 0) { + Write-Error "Failed to upload the following packages: $($failed -join ', ')" + exit 1 + } + Write-Host "Successfully uploaded all packages!" 
+ displayName: 'Publish to Conda' + workingDirectory: $(Pipeline.Workspace) + env: + ANACONDA_API_TOKEN: $(AnacondaApiToken) From 4b1bad14ed5a412f660879b5d9a103b2b42e7b4d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 27 Jan 2026 14:25:36 -0800 Subject: [PATCH 084/112] yml improvements, enable dry run of file updates --- conda/update_conda_files.py | 2 +- eng/pipelines/conda-update-pipeline.yml | 27 +++++++++++-------- .../templates/stages/conda-sdk-client.yml | 12 +++++++++ 3 files changed, 29 insertions(+), 12 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 180ba0929419..f1cf51e01e3f 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -974,7 +974,7 @@ def update_mgmt_plane_release_log( old_date, new_version = update_conda_version(target_release_date) - # Output version as Azure DevOps pipeline variable + # Output version as Azure DevOps pipeline variable to use in PR print(f"##vso[task.setvariable variable=CondaReleaseVersion]{new_version}") # convert to mm/dd/yyyy format for comparison with CSV dates diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index f6341321ec88..8a1c7f86bd2e 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -12,6 +12,10 @@ parameters: - 06.01 - 09.01 - 12.01 + - name: dryRun + displayName: Dry Run (skip PR submission) + type: boolean + default: false # Scheduled to run a week before each quarterly release schedules: @@ -96,15 +100,16 @@ extends: fi displayName: 'Update Conda Files' - - template: /eng/common/pipelines/templates/steps/create-pull-request.yml - parameters: - PRBranchName: conda-update-$(Build.BuildId) - CommitMsg: 'Update conda files for $(CondaReleaseVersion) release' - PRTitle: 'Conda Release $(CondaReleaseVersion) generated by $(Build.BuildId)' - PRBody: | - This PR was automatically generated to update Conda 
files for a new release. + - ${{ if eq(parameters.dryRun, false) }}: + - template: /eng/common/pipelines/templates/steps/create-pull-request.yml + parameters: + PRBranchName: conda-update-$(Build.BuildId) + CommitMsg: 'Update conda files for $(CondaReleaseVersion) release' + PRTitle: 'Conda Release $(CondaReleaseVersion) generated by $(Build.BuildId)' + PRBody: | + This PR was automatically generated to update Conda files for a new release. - - Updates outdated package versions in conda-sdk-client.yml - - Adds new packages to conda-sdk-client.yml - - Adds/updates yamls and changelogs - BaseBranchName: main + - Updates outdated package versions in conda-sdk-client.yml + - Adds new packages to conda-sdk-client.yml + - Adds/updates yamls and changelogs + BaseBranchName: main diff --git a/eng/pipelines/templates/stages/conda-sdk-client.yml b/eng/pipelines/templates/stages/conda-sdk-client.yml index 7b14158da625..6793493cf8b6 100644 --- a/eng/pipelines/templates/stages/conda-sdk-client.yml +++ b/eng/pipelines/templates/stages/conda-sdk-client.yml @@ -948,6 +948,18 @@ extends: # TODO get token from keyvault? 
- pwsh: | + if (-not $env:ANACONDA_API_TOKEN) { + Write-Error "ANACONDA_API_TOKEN is not set" + exit 1 + } + + anaconda whoami + if ($LASTEXITCODE -ne 0) { + Write-Error "Invalid or expired Anaconda API token" + exit 1 + } + Write-Host "Anaconda authentication successful" + $packageDir = "$(Pipeline.Workspace)/conda" $packages = Get-ChildItem -Path $packageDir -Recurse -Include "*.tar.bz2", "*.conda" From 905e59002e5dddc9ab5eef51918f22b631a7dd07 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Tue, 27 Jan 2026 14:42:58 -0800 Subject: [PATCH 085/112] remove duplicated tool section from merge --- sdk/communication/azure-communication-identity/pyproject.toml | 4 ---- sdk/keyvault/azure-keyvault-keys/pyproject.toml | 4 ---- 2 files changed, 8 deletions(-) diff --git a/sdk/communication/azure-communication-identity/pyproject.toml b/sdk/communication/azure-communication-identity/pyproject.toml index c6235bc15398..fbc3cd0f05e1 100644 --- a/sdk/communication/azure-communication-identity/pyproject.toml +++ b/sdk/communication/azure-communication-identity/pyproject.toml @@ -7,7 +7,3 @@ bundle_name = "azure-communication" [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" - -[tool.azure-sdk-conda] -in_bundle = true -bundle_name = "azure-communication" diff --git a/sdk/keyvault/azure-keyvault-keys/pyproject.toml b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index 4eb75e5353ad..e7036b8d69fa 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -67,7 +67,3 @@ dev = [ "parameterized>=0.7.3", "python-dateutil>=2.8.0", ] - -[tool.azure-sdk-conda] -in_bundle = true -bundle_name = "azure-keyvault" From 611dac3bd4112eb8f3192e33cf54c9aa5ae4a0f5 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 28 Jan 2026 13:12:14 -0800 Subject: [PATCH 086/112] fix org name --- 
eng/pipelines/templates/stages/conda-sdk-client.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/eng/pipelines/templates/stages/conda-sdk-client.yml b/eng/pipelines/templates/stages/conda-sdk-client.yml index 6793493cf8b6..095240c7c47e 100644 --- a/eng/pipelines/templates/stages/conda-sdk-client.yml +++ b/eng/pipelines/templates/stages/conda-sdk-client.yml @@ -2,7 +2,7 @@ parameters: - name: conda_username displayName: 'Conda Username' type: string - default: 'azure-sdk' + default: 'Microsoft' - name: release_msal displayName: 'msal' type: boolean @@ -945,8 +945,6 @@ extends: python -m pip install anaconda-client displayName: 'Install anaconda-client' - # TODO get token from keyvault? - - pwsh: | if (-not $env:ANACONDA_API_TOKEN) { Write-Error "ANACONDA_API_TOKEN is not set" From 8ad3e2ee34e397f6365ced9cfd91308f46f96ce6 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 28 Jan 2026 16:16:01 -0800 Subject: [PATCH 087/112] move publish logic to python script --- .../templates/stages/conda-sdk-client.yml | 71 ++----- eng/scripts/publish_conda.py | 181 ++++++++++++++++++ 2 files changed, 194 insertions(+), 58 deletions(-) create mode 100644 eng/scripts/publish_conda.py diff --git a/eng/pipelines/templates/stages/conda-sdk-client.yml b/eng/pipelines/templates/stages/conda-sdk-client.yml index 095240c7c47e..bb27858ac483 100644 --- a/eng/pipelines/templates/stages/conda-sdk-client.yml +++ b/eng/pipelines/templates/stages/conda-sdk-client.yml @@ -3,6 +3,10 @@ parameters: displayName: 'Conda Username' type: string default: 'Microsoft' + - name: dry_run + displayName: 'Dry Run (skip upload)' + type: boolean + default: false - name: release_msal displayName: 'msal' type: boolean @@ -916,7 +920,8 @@ extends: condition: and(succeeded(), ne(variables['SetDevVersion'], 'true'), ne(variables['Skip.Release'], 'true')) variables: - template: /eng/pipelines/templates/variables/globals.yml@self - + # TODO this stage 
should only be available on main branch after testing + # TODO this whole pipeline should be triggered on main when a new release is merged jobs: - deployment: PublishCondaPackages displayName: 'Publish Conda Packages' @@ -945,61 +950,11 @@ extends: python -m pip install anaconda-client displayName: 'Install anaconda-client' - - pwsh: | - if (-not $env:ANACONDA_API_TOKEN) { - Write-Error "ANACONDA_API_TOKEN is not set" - exit 1 - } - - anaconda whoami - if ($LASTEXITCODE -ne 0) { - Write-Error "Invalid or expired Anaconda API token" - exit 1 - } - Write-Host "Anaconda authentication successful" - - $packageDir = "$(Pipeline.Workspace)/conda" - $packages = Get-ChildItem -Path $packageDir -Recurse -Include "*.tar.bz2", "*.conda" - - if ($packages.Count -eq 0) { - Write-Error "No conda packages found in $packageDir" - exit 1 - } - - Write-Host "Found $($packages.Count) packages to upload:" - foreach ($pkg in $packages) { - Write-Host " - $($pkg.Name)" - } - - $failed = @() - foreach ($pkg in $packages) { - Write-Host "Uploading $($pkg.FullName)..." - $retries = 3 - $success = $false - - for ($i = 1; $i -le $retries; $i++) { - anaconda upload --user ${{ parameters.conda_username }} --skip-existing $($pkg.FullName) - if ($LASTEXITCODE -eq 0) { - Write-Host "Successfully uploaded $($pkg.Name) on attempt $i." - $success = $true - break - } - Write-Warning "Attempt $i failed, retrying..." - Start-Sleep -Seconds 5 - } - - if (-not $success) { - $failed += $pkg.Name - } - } - - if ($failed.Count -gt 0) { - Write-Error "Failed to upload the following packages: $($failed -join ', ')" - exit 1 - } - - Write-Host "Successfully uploaded all packages!" 
+ - task: PythonScript@0 displayName: 'Publish to Conda' - workingDirectory: $(Pipeline.Workspace) - env: - ANACONDA_API_TOKEN: $(AnacondaApiToken) + inputs: + scriptSource: 'filePath' + scriptPath: '$(Build.SourcesDirectory)/eng/scripts/publish_conda.py' + arguments: '--package-dir "$(Pipeline.Workspace)/conda" --user "${{ parameters.conda_username }}"${{ if eq(parameters.dry_run, true) }} --dry-run${{ else }}${{ end }}' + env: + ANACONDA_API_TOKEN: $(conda-release-apikey) \ No newline at end of file diff --git a/eng/scripts/publish_conda.py b/eng/scripts/publish_conda.py new file mode 100644 index 000000000000..aeb186693bb9 --- /dev/null +++ b/eng/scripts/publish_conda.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +""" +Publish conda packages to Anaconda.org. + +Usage: + python publish_conda.py --package-dir ./conda --user Microsoft + python publish_conda.py --package-dir ./conda --user Microsoft --dry-run +""" + +import argparse +import os +import subprocess +import sys +import time +from pathlib import Path +from ci_tools.logging import logger, configure_logging + + +def verify_authentication() -> bool: + """Verify that the Anaconda API token is valid.""" + if not os.environ.get("ANACONDA_API_TOKEN"): + logger.error("ANACONDA_API_TOKEN environment variable is not set") + return False + + result = subprocess.run( + ["anaconda", "whoami"], + capture_output=True, + text=True, + ) + + if result.returncode != 0: + logger.error("Invalid or expired Anaconda API token") + logger.error(result.stderr) + return False + + logger.info("Anaconda authentication successful") + logger.info(result.stdout.strip()) + return True + + +def find_packages(package_dir: Path) -> list[Path]: + """Find all built conda artifacts in the given directory.""" + packages = [] + for pattern in ["**/*.tar.bz2", "**/*.conda"]: + packages.extend(package_dir.glob(pattern)) + return sorted(packages) + + +def upload_package( + package_path: Path, + user: str, + max_retries: int = 3, + retry_delay: float 
= 5.0, + dry_run: bool = False, +) -> bool: + """ + Upload a single package to Anaconda.org with retry logic. + + Returns True if successful, False otherwise. + """ + for attempt in range(1, max_retries + 1): + logger.info( + f"Uploading {package_path.name} (attempt {attempt}/{max_retries})..." + ) + + if dry_run: + logger.info(f"[DRY RUN] Would upload: {package_path}") + return True + + result = subprocess.run( + [ + "anaconda", + "upload", + "--user", + user, + "--skip-existing", + str(package_path), + "--private", # TODO remove after testing is complete + ], + capture_output=True, + text=True, + ) + + if result.returncode == 0: + logger.info(f"Successfully uploaded {package_path.name}") + if result.stdout: + logger.debug(result.stdout) + return True + + logger.warning(f"Attempt {attempt} failed: {result.stderr.strip()}") + + if attempt < max_retries: + logger.info(f"Retrying in {retry_delay} seconds...") + time.sleep(retry_delay) + + return False + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Publish conda packages to Anaconda.org" + ) + parser.add_argument( + "--package-dir", + type=Path, + required=True, + help="Directory containing conda packages to upload", + ) + parser.add_argument( + "--user", + type=str, + required=True, + help="Anaconda.org username or organization to upload to", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="List packages that would be uploaded without actually uploading", + ) + parser.add_argument( + "--max-retries", + type=int, + default=3, + help="Maximum number of upload retries per package (default: 3)", + ) + parser.add_argument( + "--verbose", + action="store_true", + help="Enable verbose logging", + ) + + args = parser.parse_args() + configure_logging(args) + + if args.dry_run: + logger.info("=== DRY RUN MODE - No packages will be uploaded ===") + + # Verify authentication + if not verify_authentication(): + return 1 + + # Find packages + if not args.package_dir.exists(): + 
logger.error(f"Package directory does not exist: {args.package_dir}") + return 1 + + packages = find_packages(args.package_dir) + + if not packages: + logger.error(f"No conda packages found in {args.package_dir}") + return 1 + + logger.info(f"Found {len(packages)} packages to upload:") + for pkg in packages: + logger.info(f" - {pkg.name}") + + # Upload packages + failed = [] + for package in packages: + success = upload_package( + package, + user=args.user, + max_retries=args.max_retries, + dry_run=args.dry_run, + ) + if not success: + failed.append(package.name) + + # Report results + if failed: + logger.error(f"Failed to upload {len(failed)} package(s):") + for name in failed: + logger.error(f" - {name}") + return 1 + + logger.info(f"Successfully uploaded all {len(packages)} packages!") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) From 2df8c9eaf3870120840a663cf7762ee65b170020 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 28 Jan 2026 16:39:30 -0800 Subject: [PATCH 088/112] just to be safe --- eng/scripts/publish_conda.py | 51 ++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/eng/scripts/publish_conda.py b/eng/scripts/publish_conda.py index aeb186693bb9..c3ceb5954fe6 100644 --- a/eng/scripts/publish_conda.py +++ b/eng/scripts/publish_conda.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ Publish conda packages to Anaconda.org. 
@@ -67,31 +66,31 @@ def upload_package( logger.info(f"[DRY RUN] Would upload: {package_path}") return True - result = subprocess.run( - [ - "anaconda", - "upload", - "--user", - user, - "--skip-existing", - str(package_path), - "--private", # TODO remove after testing is complete - ], - capture_output=True, - text=True, - ) - - if result.returncode == 0: - logger.info(f"Successfully uploaded {package_path.name}") - if result.stdout: - logger.debug(result.stdout) - return True - - logger.warning(f"Attempt {attempt} failed: {result.stderr.strip()}") - - if attempt < max_retries: - logger.info(f"Retrying in {retry_delay} seconds...") - time.sleep(retry_delay) + # result = subprocess.run( + # [ + # "anaconda", + # "upload", + # "--user", + # user, + # "--skip-existing", + # str(package_path), + # "--private", # TODO remove after testing is complete + # ], + # capture_output=True, + # text=True, + # ) + + # if result.returncode == 0: + # logger.info(f"Successfully uploaded {package_path.name}") + # if result.stdout: + # logger.debug(result.stdout) + # return True + + # logger.warning(f"Attempt {attempt} failed: {result.stderr.strip()}") + + # if attempt < max_retries: + # logger.info(f"Retrying in {retry_delay} seconds...") + # time.sleep(retry_delay) return False From 1d213fb7d3130b754232b571c447c93da0bbcfb3 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 29 Jan 2026 09:55:10 -0800 Subject: [PATCH 089/112] remove publishing changes from this PR --- .../templates/stages/conda-sdk-client.yml | 53 ------------------- 1 file changed, 53 deletions(-) diff --git a/eng/pipelines/templates/stages/conda-sdk-client.yml b/eng/pipelines/templates/stages/conda-sdk-client.yml index bb27858ac483..68e89ba8f7f0 100644 --- a/eng/pipelines/templates/stages/conda-sdk-client.yml +++ b/eng/pipelines/templates/stages/conda-sdk-client.yml @@ -1,12 +1,4 @@ parameters: - - name: conda_username - displayName: 'Conda Username' - type: string - 
default: 'Microsoft' - - name: dry_run - displayName: 'Dry Run (skip upload)' - type: boolean - default: false - name: release_msal displayName: 'msal' type: boolean @@ -913,48 +905,3 @@ extends: version: 1.0.0 - package: azure-mgmt-workloadssapvirtualinstance version: 1.0.0 - - - stage: Publish_To_Conda - displayName: Publish to Conda - dependsOn: Build_Universal_Dependencies - condition: and(succeeded(), ne(variables['SetDevVersion'], 'true'), ne(variables['Skip.Release'], 'true')) - variables: - - template: /eng/pipelines/templates/variables/globals.yml@self - # TODO this stage should only be available on main branch after testing - # TODO this whole pipeline should be triggered on main when a new release is merged - jobs: - - deployment: PublishCondaPackages - displayName: 'Publish Conda Packages' - environment: package-publish - timeoutInMinutes: 120 - - pool: - name: azsdk-pool - image: ubuntu-24.04 - os: linux - - strategy: - runOnce: - deploy: - steps: - - download: current - artifact: conda - displayName: 'Download Conda Artifacts' - - - task: UsePythonVersion@0 - displayName: 'Use Python 3.11' - inputs: - versionSpec: '3.11' - - - pwsh: | - python -m pip install anaconda-client - displayName: 'Install anaconda-client' - - - task: PythonScript@0 - displayName: 'Publish to Conda' - inputs: - scriptSource: 'filePath' - scriptPath: '$(Build.SourcesDirectory)/eng/scripts/publish_conda.py' - arguments: '--package-dir "$(Pipeline.Workspace)/conda" --user "${{ parameters.conda_username }}"${{ if eq(parameters.dry_run, true) }} --dry-run${{ else }}${{ end }}' - env: - ANACONDA_API_TOKEN: $(conda-release-apikey) \ No newline at end of file From a3511efe68bf923bb73893f76a48e08a6b32c780 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 29 Jan 2026 11:03:39 -0800 Subject: [PATCH 090/112] clean up some todos --- conda/conda_helper_functions.py | 14 +-- conda/update_conda_files.py | 23 ++-- eng/scripts/publish_conda.py | 180 
-------------------------------- 3 files changed, 15 insertions(+), 202 deletions(-) delete mode 100644 eng/scripts/publish_conda.py diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 4dd667928310..4d950a88bd8e 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -53,18 +53,22 @@ def get_bundle_name(package_name: str) -> Optional[str]: return None parsed = ParsedSetup.from_path(package_path) if not parsed: - # TODO raise something + # can't proceed, need to know if it's bundled or not logger.error(f"Failed to parse setup for package {package_name}") - return None + raise Exception(f"Failed to parse setup for package {package_name}") conda_config = parsed.get_conda_config() if not conda_config: if parsed.is_stable_release(): - # TODO raise something - logger.warning( + raise Exception( f"Stable release package {package_name} needs a conda config" ) + + # beta or alpha package are not released + logger.warning( + f"No conda config found for package {package_name}, which may be a pre-release" + ) return None if conda_config and "bundle_name" in conda_config: @@ -199,8 +203,6 @@ def package_needs_update( f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." 
) - # TODO need to verify that this is the desired behavior / we're not skipping needed packages - return False try: diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index f1cf51e01e3f..e67f8ed694c3 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -189,7 +189,7 @@ def update_conda_sdk_client_yml( logger.info( f"Package {pkg_name} download_uri mismatch with PyPi, updating {curr_download_uri} to {download_uri}" ) - checkout_item["version"] = latest_version + # checkout for these packages only has download_uri, no version field checkout_item["download_uri"] = download_uri logger.info( f"Updated download_uri for {pkg_name} with version {latest_version}: {download_uri}" @@ -374,7 +374,6 @@ def determine_service_info( else: common_root = "azure" - # TODO handle exceptions msrest,msal.msal-extensions,azure-ai-vision,azure-healthinsights package_path = get_package_path(package_name) if not service_name and package_path: service_name = os.path.basename(os.path.dirname(package_path)) @@ -386,8 +385,7 @@ def format_requirement(req: str) -> str: """Format a requirement string for conda meta.yaml.""" name_unpinned = re.split(r"[>=={{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}" @@ -401,7 +399,7 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] host_requirements = set(["pip"]) run_requirements = set() - # TODO finalize actual list of essentials, this is more of a placeholder with reqs idk how to find dynamically + # reqs commonly seen in existing meta.yaml files that aren't always in setup.py or pyproject.toml for essential_req in [ "azure-identity", "azure-core", @@ -427,8 +425,6 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] host_requirements.add(req_name) run_requirements.add(req_name) - # TODO there are other requirements to consider... 
- return list(host_requirements), list(run_requirements) @@ -439,13 +435,13 @@ def get_package_metadata( if package_path: service_dir = os.path.basename(os.path.dirname(package_path)) else: - # TODO service_dir = package_name.replace("azure-", "") + + # TODO handle bundle home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" - # TODO definitely need to check if this is actually always correct conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" description = ( f"This is the {summary}.\n Please see {conda_url} for version details." @@ -455,7 +451,6 @@ def get_package_metadata( def generate_data_plane_meta_yaml( - package_dict: Dict[str, Dict[str, str]], bundle_map: Dict[str, List[str]], package_name: str, bundle_name: Optional[str], @@ -463,8 +458,7 @@ def generate_data_plane_meta_yaml( """ Generate the meta.yaml content for a data plane package or release group. """ - - # TODO is it correct that the env var name is arbitrary and replaced in conda_functions.py? 
+ # assumes env var name is arbitrary and replaced in conda_functions.py src_distr_name = package_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" @@ -581,7 +575,7 @@ def add_new_data_plane_packages( try: meta_yml = generate_data_plane_meta_yaml( - package_dict, bundle_map, package_name, bundle_name + bundle_map, package_name, bundle_name ) if bundle_name: bundles_processed.add(bundle_name) @@ -629,7 +623,6 @@ def add_new_mgmt_plane_packages( result.extend(new_mgmt_plane_names) return result - # TODO probably need to automate removal of deprecated packages from here existing_imports_text = test_match.group(1) existing_imports = [ line.strip() @@ -1065,8 +1058,6 @@ def update_mgmt_plane_release_log( package_dict, all_mgmt_plane_names, new_version ) - # TODO AKA link logic - print("=== REPORT ===") if conda_sdk_client_pkgs_result: diff --git a/eng/scripts/publish_conda.py b/eng/scripts/publish_conda.py deleted file mode 100644 index c3ceb5954fe6..000000000000 --- a/eng/scripts/publish_conda.py +++ /dev/null @@ -1,180 +0,0 @@ -""" -Publish conda packages to Anaconda.org. 
- -Usage: - python publish_conda.py --package-dir ./conda --user Microsoft - python publish_conda.py --package-dir ./conda --user Microsoft --dry-run -""" - -import argparse -import os -import subprocess -import sys -import time -from pathlib import Path -from ci_tools.logging import logger, configure_logging - - -def verify_authentication() -> bool: - """Verify that the Anaconda API token is valid.""" - if not os.environ.get("ANACONDA_API_TOKEN"): - logger.error("ANACONDA_API_TOKEN environment variable is not set") - return False - - result = subprocess.run( - ["anaconda", "whoami"], - capture_output=True, - text=True, - ) - - if result.returncode != 0: - logger.error("Invalid or expired Anaconda API token") - logger.error(result.stderr) - return False - - logger.info("Anaconda authentication successful") - logger.info(result.stdout.strip()) - return True - - -def find_packages(package_dir: Path) -> list[Path]: - """Find all built conda artifacts in the given directory.""" - packages = [] - for pattern in ["**/*.tar.bz2", "**/*.conda"]: - packages.extend(package_dir.glob(pattern)) - return sorted(packages) - - -def upload_package( - package_path: Path, - user: str, - max_retries: int = 3, - retry_delay: float = 5.0, - dry_run: bool = False, -) -> bool: - """ - Upload a single package to Anaconda.org with retry logic. - - Returns True if successful, False otherwise. - """ - for attempt in range(1, max_retries + 1): - logger.info( - f"Uploading {package_path.name} (attempt {attempt}/{max_retries})..." 
- ) - - if dry_run: - logger.info(f"[DRY RUN] Would upload: {package_path}") - return True - - # result = subprocess.run( - # [ - # "anaconda", - # "upload", - # "--user", - # user, - # "--skip-existing", - # str(package_path), - # "--private", # TODO remove after testing is complete - # ], - # capture_output=True, - # text=True, - # ) - - # if result.returncode == 0: - # logger.info(f"Successfully uploaded {package_path.name}") - # if result.stdout: - # logger.debug(result.stdout) - # return True - - # logger.warning(f"Attempt {attempt} failed: {result.stderr.strip()}") - - # if attempt < max_retries: - # logger.info(f"Retrying in {retry_delay} seconds...") - # time.sleep(retry_delay) - - return False - - -def main() -> int: - parser = argparse.ArgumentParser( - description="Publish conda packages to Anaconda.org" - ) - parser.add_argument( - "--package-dir", - type=Path, - required=True, - help="Directory containing conda packages to upload", - ) - parser.add_argument( - "--user", - type=str, - required=True, - help="Anaconda.org username or organization to upload to", - ) - parser.add_argument( - "--dry-run", - action="store_true", - help="List packages that would be uploaded without actually uploading", - ) - parser.add_argument( - "--max-retries", - type=int, - default=3, - help="Maximum number of upload retries per package (default: 3)", - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Enable verbose logging", - ) - - args = parser.parse_args() - configure_logging(args) - - if args.dry_run: - logger.info("=== DRY RUN MODE - No packages will be uploaded ===") - - # Verify authentication - if not verify_authentication(): - return 1 - - # Find packages - if not args.package_dir.exists(): - logger.error(f"Package directory does not exist: {args.package_dir}") - return 1 - - packages = find_packages(args.package_dir) - - if not packages: - logger.error(f"No conda packages found in {args.package_dir}") - return 1 - - logger.info(f"Found 
{len(packages)} packages to upload:") - for pkg in packages: - logger.info(f" - {pkg.name}") - - # Upload packages - failed = [] - for package in packages: - success = upload_package( - package, - user=args.user, - max_retries=args.max_retries, - dry_run=args.dry_run, - ) - if not success: - failed.append(package.name) - - # Report results - if failed: - logger.error(f"Failed to upload {len(failed)} package(s):") - for name in failed: - logger.error(f" - {name}") - return 1 - - logger.info(f"Successfully uploaded all {len(packages)} packages!") - return 0 - - -if __name__ == "__main__": - sys.exit(main()) From f6f42e268cd448c143a84d516ce4e6b41ce9041d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 29 Jan 2026 12:43:27 -0800 Subject: [PATCH 091/112] handle home_url for bundle --- conda/update_conda_files.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index e67f8ed694c3..b6524874512c 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -429,16 +429,26 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] def get_package_metadata( - package_name: str, package_path: Optional[str] + package_name: str, package_path: Optional[str], is_bundle: bool = False ) -> Tuple[str, str, str]: - """Extract package metadata for about section in meta.yaml.""" + """Extract package metadata for about section in meta.yaml. + + :param package_name: The name of the package or bundle. + :param package_path: The filesystem path to the package. + :param is_bundle: Whether this is a release bundle (affects URL structure). 
+ """ if package_path: service_dir = os.path.basename(os.path.dirname(package_path)) else: service_dir = package_name.replace("azure-", "") - # TODO handle bundle - home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" + # For bundles, URL points to service directory; for individual packages, include package name + if is_bundle: + home_url = ( + f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}" + ) + else: + home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" @@ -486,7 +496,9 @@ def generate_data_plane_meta_yaml( run_reqs = list(run_reqs) package_path = get_package_path(bundle_map[bundle_name][0]) - home_url, summary, description = get_package_metadata(bundle_name, package_path) + home_url, summary, description = get_package_metadata( + bundle_name, package_path, is_bundle=True + ) else: logger.info(f"Generating meta.yaml for package {package_name}") package_path = get_package_path(package_name) From 4a649ab749239ca72b6572bf2bdc5a96ae3147bc Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 29 Jan 2026 12:59:38 -0800 Subject: [PATCH 092/112] add next steps to generated PR desc --- eng/pipelines/conda-update-pipeline.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 8a1c7f86bd2e..2b2672c06825 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -112,4 +112,10 @@ extends: - Updates outdated package versions in conda-sdk-client.yml - Adds new packages to conda-sdk-client.yml - Adds/updates yamls and changelogs + + ## Next Steps + - [ ] For new data plane packages, submit this form to create a private dummy library placeholder in Conda 
before uploading a release: https://forms.office.com/Pages/ResponsePage.aspx?id=v4j5cvGGr0GRqy180BHbR180k2XpSUFBtXHTh8-jMUlUNlA1MFpZOVhZME1aNU1EU1Y3SjZRU0JNRC4u + - [ ] After this PR is merged and succeeds the Conda build, approve the pipeline to upload the releases to Conda + - [ ] After upload, delete the dummy library and make the new packages publicly available in Conda. + - [ ] Create an AKA link for new release logs here: http://aka.ms/ BaseBranchName: main From 2f8f0585d7690a35e427d2ac0ba9e101e749b9bf Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 29 Jan 2026 13:44:33 -0800 Subject: [PATCH 093/112] untouch pyprojects that only have diff coz resolving merge --- .../azure-communication-identity/pyproject.toml | 8 ++++---- sdk/keyvault/azure-keyvault-keys/pyproject.toml | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/sdk/communication/azure-communication-identity/pyproject.toml b/sdk/communication/azure-communication-identity/pyproject.toml index fbc3cd0f05e1..db898be6409d 100644 --- a/sdk/communication/azure-communication-identity/pyproject.toml +++ b/sdk/communication/azure-communication-identity/pyproject.toml @@ -1,9 +1,9 @@ [tool.azure-sdk-build] pyright = false -[tool.azure-sdk-conda] -in_bundle = true -bundle_name = "azure-communication" - [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-communication" diff --git a/sdk/keyvault/azure-keyvault-keys/pyproject.toml b/sdk/keyvault/azure-keyvault-keys/pyproject.toml index e7036b8d69fa..7caa00c7dd7c 100644 --- a/sdk/keyvault/azure-keyvault-keys/pyproject.toml +++ b/sdk/keyvault/azure-keyvault-keys/pyproject.toml @@ -47,10 +47,6 @@ pytyped = ["py.typed"] [tool.azure-sdk-build] pyright = false -[tool.azure-sdk-conda] -in_bundle = true -bundle_name = "azure-keyvault" - [tool.uv.sources] azure-core = { path = "../../core/azure-core" } 
azure-keyvault-nspkg = { path = "../../nspkg/azure-keyvault-nspkg" } @@ -67,3 +63,7 @@ dev = [ "parameterized>=0.7.3", "python-dateutil>=2.8.0", ] + +[tool.azure-sdk-conda] +in_bundle = true +bundle_name = "azure-keyvault" From 1837d99c1039e0437037854757c9b9bdb763eb5e Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 29 Jan 2026 14:47:04 -0800 Subject: [PATCH 094/112] fix common_root, release type classification, other minor fixes --- conda/conda_helper_functions.py | 2 +- conda/update_conda_files.py | 16 +++++++++++----- .../ci_tools/parsing/parse_functions.py | 17 +++++++++++++---- .../tests/test_parse_functionality.py | 17 +++++++++++++++++ 4 files changed, 42 insertions(+), 10 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 4d950a88bd8e..5c4cdc8c7d12 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -125,7 +125,7 @@ def parse_csv() -> List[Dict[str, str]]: try: logger.info(f"Downloading CSV from {AZURE_SDK_CSV_URL}") - with urllib.request.urlopen(AZURE_SDK_CSV_URL) as response: + with urllib.request.urlopen(AZURE_SDK_CSV_URL, timeout=10) as response: csv_content = response.read().decode("utf-8") # Parse the CSV content diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index b6524874512c..c8ebe5c988f3 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -368,16 +368,15 @@ def determine_service_info( # defaults package_name = pkg.get(PACKAGE_COL, "") service_name = pkg.get(REPO_PATH_COL, "").lower() - - if bundle_name: - common_root = f"azure/{bundle_name.split('-')[1]}" - else: - common_root = "azure" + common_root = "azure" package_path = get_package_path(package_name) if not service_name and package_path: service_name = os.path.basename(os.path.dirname(package_path)) + if bundle_name and service_name: + common_root = f"azure/{service_name}" + return common_root, service_name @@ -1099,3 
+1098,10 @@ def update_mgmt_plane_release_log( ) for pkg_name in data_plane_release_log_results: print(f"- {pkg_name}") + + if mgmt_plane_release_log_results: + print( + "\nThe following management plane packages may require manual adjustments in azure-mgmt release log:" + ) + for pkg_name in mgmt_plane_release_log_results: + print(f"- {pkg_name}") diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index 31a3d8d323d4..ca376d8a5ea5 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -16,6 +16,7 @@ # this assumes the presence of "packaging" from packaging.requirements import Requirement +from packaging.version import Version, InvalidVersion from setuptools import Extension from ci_tools.variables import str_to_bool @@ -468,7 +469,7 @@ def get_build_config(package_path: str) -> Optional[Dict[str, Any]]: def get_conda_config(package_path: str) -> Optional[Dict[str, Any]]: """ - Attempts to retrieve all values within [tools.azure-sdk-conda] section of a pyproject.toml. + Attempts to retrieve all values within [tool.azure-sdk-conda] section of a pyproject.toml. 
""" if os.path.isfile(package_path): package_path = os.path.dirname(package_path) @@ -884,6 +885,14 @@ def classify_release_type(version: str) -> str: :rtype: str :return: Either "beta" or "stable" """ - if "b" in version.lower(): - return "beta" - return "stable" + try: + parsed = Version(version) + # .pre is set for alpha/beta/rc, .dev is set for dev releases + if parsed.pre is not None or parsed.dev is not None: + return "beta" + return "stable" + except InvalidVersion: + # Fallback + if any(marker in version.lower() for marker in ("a", "b", "rc", "dev")): + return "beta" + return "stable" diff --git a/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py b/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py index 1405850a0285..30c6c5ff2343 100644 --- a/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py +++ b/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py @@ -333,3 +333,20 @@ def test_namespace_discovery_with_substantial_content(): assert result == "test.module" finally: os.unlink(temp_file) + + +@patch("ci_tools.parsing.parse_functions.read_setup_py_content") +def test_is_stable_release(test_patch): + test_patch.return_value = """ +from setuptools import setup +setup(name="azure-test", version="1.0.0") +""" + result = ParsedSetup.from_path(setup_project_scenario) + assert result.is_stable_release() == True + + test_patch.return_value = """ +from setuptools import setup +setup(name="azure-test", version="1.0.0b1") +""" + result = ParsedSetup.from_path(setup_project_scenario) + assert result.is_stable_release() == False From aab424f4696bda2be77719092cdc18832fef2596 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 30 Jan 2026 14:45:34 -0800 Subject: [PATCH 095/112] improve manual next steps --- conda/update_conda_files.py | 11 +++++++++++ eng/pipelines/conda-update-pipeline.yml | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/conda/update_conda_files.py 
b/conda/update_conda_files.py index c8ebe5c988f3..be48b336686e 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -1105,3 +1105,14 @@ def update_mgmt_plane_release_log( ) for pkg_name in mgmt_plane_release_log_results: print(f"- {pkg_name}") + + print("\n=== Manual Steps for New Data Plane Packages ===") + print( + "- A dummy placeholder library needs to be requested on Conda for new data plane packages." + ) + print("- A new AKA link needs to be created for each new release log.") + print( + "\nSee the generated PR description for further details. The new data plane package names are:" + ) + for pkg_name in new_data_plane_names: + print(f"{pkg_name}") diff --git a/eng/pipelines/conda-update-pipeline.yml b/eng/pipelines/conda-update-pipeline.yml index 2b2672c06825..c7a4a6db1d66 100644 --- a/eng/pipelines/conda-update-pipeline.yml +++ b/eng/pipelines/conda-update-pipeline.yml @@ -116,6 +116,6 @@ extends: ## Next Steps - [ ] For new data plane packages, submit this form to create a private dummy library placeholder in Conda before uploading a release: https://forms.office.com/Pages/ResponsePage.aspx?id=v4j5cvGGr0GRqy180BHbR180k2XpSUFBtXHTh8-jMUlUNlA1MFpZOVhZME1aNU1EU1Y3SjZRU0JNRC4u - [ ] After this PR is merged and succeeds the Conda build, approve the pipeline to upload the releases to Conda - - [ ] After upload, delete the dummy library and make the new packages publicly available in Conda. + - [ ] After upload, delete the dummy libraries and make the new packages publicly available in Conda. 
- [ ] Create an AKA link for new release logs here: http://aka.ms/ BaseBranchName: main From 5e562af744e6a338099de96b3e8ff039d4d8ffbf Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 30 Jan 2026 15:08:49 -0800 Subject: [PATCH 096/112] bundle fix --- conda/update_conda_files.py | 37 ++++++++++++++++++------------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index be48b336686e..bcffdd8ca9f4 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -467,8 +467,10 @@ def generate_data_plane_meta_yaml( """ Generate the meta.yaml content for a data plane package or release group. """ - # assumes env var name is arbitrary and replaced in conda_functions.py - src_distr_name = package_name.split("-")[-1].upper() + # Use bundle_name if available for recipe name and env var derivation + # however, env var name is arbitrary and replaced in conda_functions.py + recipe_name = bundle_name if bundle_name else package_name + src_distr_name = recipe_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" # TODO not sure if this is the best way to get these requirements @@ -514,7 +516,7 @@ def generate_data_plane_meta_yaml( host_reqs_str = "\n - ".join(host_reqs) run_reqs_str = "\n - ".join(run_reqs) pkg_imports_str = "\n - ".join(pkg_imports) - meta_yaml_content = f"""{{% set name = "{package_name}" %}} + meta_yaml_content = f"""{{% set name = "{recipe_name}" %}} package: name: "{{{{ name|lower }}}}" @@ -557,7 +559,6 @@ def generate_data_plane_meta_yaml( def add_new_data_plane_packages( - package_dict: Dict[str, Dict[str, str]], bundle_map: Dict[str, List[str]], new_data_plane_names: List[str], ) -> List[str]: @@ -573,16 +574,18 @@ def add_new_data_plane_packages( for package_name in new_data_plane_names: logger.info(f"Adding new data plane meta.yaml for: {package_name}") - pkg_yaml_path = 
os.path.join(CONDA_RECIPES_DIR, package_name, "meta.yaml") - os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) - + file_name = package_name bundle_name = get_bundle_name(package_name) + if bundle_name: + if bundle_name in bundles_processed: + logger.info( + f"Meta.yaml for bundle {bundle_name} already created, skipping {package_name}" + ) + continue + file_name = bundle_name - if bundle_name and bundle_name in bundles_processed: - logger.info( - f"Meta.yaml for bundle {bundle_name} already created, skipping {package_name}" - ) - continue + pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, file_name, "meta.yaml") + os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) try: meta_yml = generate_data_plane_meta_yaml( @@ -613,9 +616,7 @@ def add_new_data_plane_packages( # ===================================== -def add_new_mgmt_plane_packages( - package_dict: Dict[str, Dict[str, str]], new_mgmt_plane_names: List[str] -) -> List[str]: +def add_new_mgmt_plane_packages(new_mgmt_plane_names: List[str]) -> List[str]: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" if len(new_mgmt_plane_names) == 0: return [] @@ -1050,13 +1051,11 @@ def update_mgmt_plane_release_log( # handle new data plane libraries new_data_plane_results = add_new_data_plane_packages( - package_dict, bundle_map, new_data_plane_names + bundle_map, new_data_plane_names ) # handle new mgmt plane libraries - new_mgmt_plane_results = add_new_mgmt_plane_packages( - package_dict, new_mgmt_plane_names - ) + new_mgmt_plane_results = add_new_mgmt_plane_packages(new_mgmt_plane_names) # add/update release logs data_plane_release_log_results = update_data_plane_release_logs( From 696fd1fe5fcfc9b0247c250e6e555b7ca8f6bb68 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Fri, 30 Jan 2026 16:05:27 -0800 Subject: [PATCH 097/112] properly import submodules --- conda/conda_helper_functions.py | 51 +++++++++++++++++++++++++++++++++ 
conda/update_conda_files.py | 28 +++++++++--------- 2 files changed, 64 insertions(+), 15 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 5c4cdc8c7d12..13ee1a1505d3 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -260,3 +260,54 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int if package_name: package_index[package_name] = (artifact_idx, checkout_idx) return package_index + + +def get_valid_package_imports(package_name: str) -> List[str]: + """ + Inspect the package's actual module structure and return only valid imports. + + This avoids assuming all packages have .aio, .aio.operations, .models, and .operations + submodules, since not all packages have the same structure. + + :param package_name: The name of the package (e.g., "azure-mgmt-advisor" or "azure-eventgrid"). + :return: List of valid module names for import (e.g., ["azure.eventgrid", "azure.eventgrid.aio"]). 
+ """ + module_name = package_name.replace("-", ".") + imports = [module_name] + + package_path = get_package_path(package_name) + if not package_path: + logger.warning( + f"Could not find package path for {package_name}, using base import only" + ) + return imports + + # Construct the path to the actual module directory + # e.g., azure-mgmt-advisor -> azure/mgmt/advisor + module_parts = module_name.split(".") + module_dir = os.path.join(package_path, *module_parts) + + if not os.path.isdir(module_dir): + logger.warning( + f"Module directory not found for {package_name} at {module_dir}, using base import only" + ) + return imports + + # Check for common submodules and only add if they exist + submodules_to_check = ["aio", "models", "operations"] + + for submodule_name in submodules_to_check: + submodule_path = os.path.join(module_dir, submodule_name) + if os.path.isdir(submodule_path) and os.path.exists( + os.path.join(submodule_path, "__init__.py") + ): + imports.append(f"{module_name}.{submodule_name}") + + # Check for aio.operations (nested submodule) + aio_operations_path = os.path.join(module_dir, "aio", "operations") + if os.path.isdir(aio_operations_path) and os.path.exists( + os.path.join(aio_operations_path, "__init__.py") + ): + imports.append(f"{module_name}.aio.operations") + + return imports diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index bcffdd8ca9f4..4a1750deaf30 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -19,6 +19,7 @@ get_package_path, get_bundle_name, map_bundle_to_packages, + get_valid_package_imports, PACKAGE_COL, VERSION_GA_COL, LATEST_GA_DATE_COL, @@ -474,7 +475,6 @@ def generate_data_plane_meta_yaml( src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" # TODO not sure if this is the best way to get these requirements - # TODO don't think this covers all possible import tests, e.g. azure.eventgrid, azure.eventgrid.aio <- when would I add that? 
if bundle_name: # handle grouped packages logger.info( @@ -492,9 +492,11 @@ def generate_data_plane_meta_yaml( host_reqs.update(pkg_host_reqs) run_reqs.update(pkg_run_reqs) - pkg_imports.append(pkg.replace("-", ".")) + # Get valid imports for this package (including .aio if it exists) + pkg_imports.extend(get_valid_package_imports(pkg)) host_reqs = list(host_reqs) run_reqs = list(run_reqs) + pkg_imports = list(set(pkg_imports)) # deduplicate package_path = get_package_path(bundle_map[bundle_name][0]) home_url, summary, description = get_package_metadata( @@ -506,7 +508,8 @@ def generate_data_plane_meta_yaml( parsed_setup = ParsedSetup.from_path(package_path) host_reqs, run_reqs = get_package_requirements(parsed_setup) - pkg_imports = [package_name.replace("-", ".")] + # Get valid imports for this package (including .aio if it exists) + pkg_imports = get_valid_package_imports(package_name) home_url, summary, description = get_package_metadata( package_name, package_path @@ -648,18 +651,13 @@ def add_new_mgmt_plane_packages(new_mgmt_plane_names: List[str]) -> List[str]: logger.warning("Skipping package with missing name") continue - module_name = package_name.replace("-", ".") - - imports = [ - f"- {module_name}", - f"- {module_name}.aio", - f"- {module_name}.aio.operations", - f"- {module_name}.models", - f"- {module_name}.operations", - ] - - new_imports.extend(imports) - logger.info(f"Generated import statements for {package_name}") + imports = get_valid_package_imports(package_name) + # Format imports for YAML with "- " prefix + formatted = [f"- {imp}" for imp in imports] + new_imports.extend(formatted) + logger.info( + f"Generated {len(imports)} import statements for {package_name}: {formatted}" + ) all_imports = list(set(existing_imports + new_imports)) From 3e6d2115d5b8a35df953e89f0f23ee0321db9945 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 2 Feb 2026 09:32:39 -0800 Subject: [PATCH 098/112] release log bundle 
bug fix --- conda/update_conda_files.py | 42 +++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 4a1750deaf30..b476d5350441 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -383,14 +383,14 @@ def determine_service_info( def format_requirement(req: str) -> str: """Format a requirement string for conda meta.yaml.""" - name_unpinned = re.split(r"[>==={{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}" - # filter out ~ for yml format - req = req.replace("~", "") + # translate compatible release (~=) to >= for yml + req = req.replace("~=", ">=") return req @@ -816,17 +816,18 @@ def update_data_plane_release_logs( # check for bundle if bundle_name: release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{bundle_name}.md") + display_name = bundle_name else: release_log_path = os.path.join( CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" ) - bundle_name = package_name # for release log logic below + display_name = package_name # for release log logic below if not os.path.exists(release_log_path): # Add brand new release log file - logger.info(f"Creating new release log for: {bundle_name}") + logger.info(f"Creating new release log for: {display_name}") - title_parts = bundle_name.replace("azure-", "").split("-") + title_parts = display_name.replace("azure-", "").split("-") title = " ".join(word.title() for word in title_parts) content = f"# Azure {title} client library for Python (conda)\n\n" @@ -849,14 +850,14 @@ def update_data_plane_release_logs( try: with open(release_log_path, "w") as f: f.write(content) - logger.info(f"Created new release log for {bundle_name}") + logger.info(f"Created new release log for {display_name}") except Exception as e: - logger.error(f"Failed to create release log for {bundle_name}: {e}") - result.append(bundle_name) + logger.error(f"Failed to create release log for {display_name}: {e}") + 
result.append(display_name) else: logger.info( - f"Release log for {bundle_name} already exists, check that new package {package_name} is included" + f"Release log for {display_name} already exists, check that new package {package_name} is included" ) return result @@ -1103,13 +1104,14 @@ def update_mgmt_plane_release_log( for pkg_name in mgmt_plane_release_log_results: print(f"- {pkg_name}") - print("\n=== Manual Steps for New Data Plane Packages ===") - print( - "- A dummy placeholder library needs to be requested on Conda for new data plane packages." - ) - print("- A new AKA link needs to be created for each new release log.") - print( - "\nSee the generated PR description for further details. The new data plane package names are:" - ) - for pkg_name in new_data_plane_names: - print(f"{pkg_name}") + if len(new_data_plane_names) > 0: + print("\n=== Manual Steps for New Data Plane Packages ===") + print( + "- A dummy placeholder library needs to be requested on Conda for new data plane packages." + ) + print("- A new AKA link needs to be created for each new release log.") + print( + "\nSee the generated PR description for further details. 
The new data plane package names are:" + ) + for pkg_name in new_data_plane_names: + print(f"{pkg_name}") From 3d3dd4ffed9b108a8daed33709d9cc9f9cb49b1d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 2 Feb 2026 13:27:57 -0800 Subject: [PATCH 099/112] check stable release thru pypi, update missed pyproject --- conda/conda_helper_functions.py | 31 +++++++++++++++++++++++- sdk/ai/azure-ai-voicelive/pyproject.toml | 5 +++- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 13ee1a1505d3..38ba809b6a6b 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -11,6 +11,7 @@ import urllib.request from datetime import datetime from ci_tools.parsing import ParsedSetup +from packaging.version import Version ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) SDK_DIR = os.path.join(ROOT_DIR, "sdk") @@ -60,7 +61,7 @@ def get_bundle_name(package_name: str) -> Optional[str]: conda_config = parsed.get_conda_config() if not conda_config: - if parsed.is_stable_release(): + if is_stable_on_pypi(package_name): raise Exception( f"Stable release package {package_name} needs a conda config" ) @@ -220,6 +221,34 @@ def package_needs_update( return False +def is_stable_on_pypi(package_name: str) -> bool: + """ + Check if a package has a stable (GA) release on PyPI. + + Uses PEP 440 version parsing to determine if the latest version is a pre-release. + + :param package_name: The name of the package to check. + :return: True if the latest PyPI version is stable, False otherwise. 
+ """ + pypi_url = f"https://pypi.org/pypi/{package_name}/json" + try: + with urllib.request.urlopen(pypi_url, timeout=10) as response: + data = json.loads(response.read().decode("utf-8")) + latest_version = data["info"]["version"] + parsed_version = Version(latest_version) + + if parsed_version.is_prerelease: + logger.debug(f"Package {package_name} version {latest_version} is pre-release") + return False + + logger.debug(f"Package {package_name} version {latest_version} is stable") + return True + + except Exception as e: + logger.warning(f"Failed to check PyPI for {package_name}: {e}") + return False + + def get_package_data_from_pypi( package_name: str, ) -> Tuple[Optional[str], Optional[str]]: diff --git a/sdk/ai/azure-ai-voicelive/pyproject.toml b/sdk/ai/azure-ai-voicelive/pyproject.toml index a9e8a901bc41..63c74d9d554d 100644 --- a/sdk/ai/azure-ai-voicelive/pyproject.toml +++ b/sdk/ai/azure-ai-voicelive/pyproject.toml @@ -76,4 +76,7 @@ exclude = [ pytyped = ["py.typed"] [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" -asyncio_mode = "auto" \ No newline at end of file +asyncio_mode = "auto" + +[tool.azure-sdk-conda] +in_bundle = false \ No newline at end of file From bc50957f9e65cfe8bb79a1dcdeb6e1cd33b87df1 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 2 Feb 2026 13:40:05 -0800 Subject: [PATCH 100/112] clean, remove unneeded function and uamqp exception --- conda/conda_helper_functions.py | 3 -- .../ci_tools/parsing/parse_functions.py | 30 ------------------- .../tests/test_parse_functionality.py | 17 ----------- 3 files changed, 50 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 38ba809b6a6b..b180118e4765 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -197,9 +197,6 @@ def package_needs_update( ) if not compare_date: - if not is_new and package_row.get(PACKAGE_COL) == "uamqp": - return True # uamqp 
is an exception - logger.debug( f"Package {package_row.get(PACKAGE_COL)} is skipped due to missing {FIRST_GA_DATE_COL if is_new else LATEST_GA_DATE_COL}." ) diff --git a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py index ca376d8a5ea5..5c8504ba4fbb 100644 --- a/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py +++ b/eng/tools/azure-sdk-tools/ci_tools/parsing/parse_functions.py @@ -361,15 +361,6 @@ def get_config_setting(self, setting: str, default: Any = True) -> Any: def is_reporting_suppressed(self, setting: str) -> bool: return compare_string_to_glob_array(setting, self.get_config_setting("suppressed_skip_warnings", [])) - def is_stable_release(self) -> bool: - """ - Check if this package is a stable release version. - - :rtype: bool - :return: True if this is a stable release, False if beta - """ - return classify_release_type(self.version) == "stable" - def __str__(self): lines = [f"ParsedSetup from {self.folder}"] for attr in [ @@ -875,24 +866,3 @@ def compare_string_to_glob_array(string: str, glob_array: List[str]) -> bool: This function is used to easily compare a string to a set of glob strings, if it matches any of them, returns True. """ return any([fnmatch.fnmatch(string, glob) for glob in glob_array]) - - -def classify_release_type(version: str) -> str: - """ - Classify a package version as 'beta' or 'stable' based on version string patterns. 
- - :param str version: The version string to classify (e.g., "1.0.0", "2.1.0b1", "1.5.0a2") - :rtype: str - :return: Either "beta" or "stable" - """ - try: - parsed = Version(version) - # .pre is set for alpha/beta/rc, .dev is set for dev releases - if parsed.pre is not None or parsed.dev is not None: - return "beta" - return "stable" - except InvalidVersion: - # Fallback - if any(marker in version.lower() for marker in ("a", "b", "rc", "dev")): - return "beta" - return "stable" diff --git a/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py b/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py index 30c6c5ff2343..1405850a0285 100644 --- a/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py +++ b/eng/tools/azure-sdk-tools/tests/test_parse_functionality.py @@ -333,20 +333,3 @@ def test_namespace_discovery_with_substantial_content(): assert result == "test.module" finally: os.unlink(temp_file) - - -@patch("ci_tools.parsing.parse_functions.read_setup_py_content") -def test_is_stable_release(test_patch): - test_patch.return_value = """ -from setuptools import setup -setup(name="azure-test", version="1.0.0") -""" - result = ParsedSetup.from_path(setup_project_scenario) - assert result.is_stable_release() == True - - test_patch.return_value = """ -from setuptools import setup -setup(name="azure-test", version="1.0.0b1") -""" - result = ParsedSetup.from_path(setup_project_scenario) - assert result.is_stable_release() == False From 5b8009e55d60ba5dcc205b47ec339b072e2a31ff Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Mon, 2 Feb 2026 14:11:19 -0800 Subject: [PATCH 101/112] cleanup --- conda/update_conda_files.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index b476d5350441..84e628b72584 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -389,7 +389,7 @@ def 
format_requirement(req: str) -> str: if name_unpinned.startswith("azure-") or name_unpinned in ["msrest"]: return f"{name_unpinned} >={{{{ environ.get('AZURESDK_CONDA_VERSION', '0.0.0') }}}}" - # translate compatible release (~=) to >= for yml + # translate compatible release (~=) to >= for yml req = req.replace("~=", ">=") return req @@ -474,7 +474,6 @@ def generate_data_plane_meta_yaml( src_distr_name = recipe_name.split("-")[-1].upper() src_distribution_env_var = f"{src_distr_name}_SOURCE_DISTRIBUTION" - # TODO not sure if this is the best way to get these requirements if bundle_name: # handle grouped packages logger.info( @@ -577,7 +576,7 @@ def add_new_data_plane_packages( for package_name in new_data_plane_names: logger.info(f"Adding new data plane meta.yaml for: {package_name}") - file_name = package_name + folder_name = package_name bundle_name = get_bundle_name(package_name) if bundle_name: if bundle_name in bundles_processed: @@ -585,9 +584,9 @@ def add_new_data_plane_packages( f"Meta.yaml for bundle {bundle_name} already created, skipping {package_name}" ) continue - file_name = bundle_name + folder_name = bundle_name - pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, file_name, "meta.yaml") + pkg_yaml_path = os.path.join(CONDA_RECIPES_DIR, folder_name, "meta.yaml") os.makedirs(os.path.dirname(pkg_yaml_path), exist_ok=True) try: @@ -815,13 +814,11 @@ def update_data_plane_release_logs( bundle_name = get_bundle_name(package_name) # check for bundle if bundle_name: - release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{bundle_name}.md") display_name = bundle_name else: - release_log_path = os.path.join( - CONDA_RELEASE_LOGS_DIR, f"{package_name}.md" - ) - display_name = package_name # for release log logic below + display_name = package_name + + release_log_path = os.path.join(CONDA_RELEASE_LOGS_DIR, f"{display_name}.md") if not os.path.exists(release_log_path): # Add brand new release log file From b14e059989a2ed4a7e88908d754154cd37c88c90 Mon Sep 17 
00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 4 Feb 2026 11:09:29 -0800 Subject: [PATCH 102/112] cache glob results --- conda/conda_helper_functions.py | 37 ++++++++++++++++++++++----------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index b180118e4765..1d9c769b5c7c 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -4,6 +4,7 @@ import os import glob +from functools import lru_cache from typing import Dict, List, Optional, Tuple import csv import json @@ -32,14 +33,30 @@ # ===================================== +@lru_cache(maxsize=None) +def _build_package_path_index() -> Dict[str, str]: + """ + Build a one-time index mapping package names to their filesystem paths. + + This scans the sdk/ directory once and caches the result for all subsequent lookups. + """ + all_paths = glob.glob(os.path.join(SDK_DIR, "*", "*")) + # Exclude temp directories like .tox, .venv, __pycache__, etc. 
+ return { + os.path.basename(p): p + for p in all_paths + if os.path.isdir(p) and not os.path.basename(p).startswith((".", "__")) + } + + def get_package_path(package_name: str) -> Optional[str]: """Get the filesystem path of an SDK package given its name.""" - pattern = os.path.join(SDK_DIR, "**", package_name) - matches = glob.glob(pattern, recursive=True) - if not matches: + path_index = _build_package_path_index() + package_path = path_index.get(package_name) + if not package_path: logger.error(f"Package path not found for package: {package_name}") return None - return matches[0] + return package_path def get_bundle_name(package_name: str) -> Optional[str]: @@ -81,13 +98,7 @@ def get_bundle_name(package_name: str) -> Optional[str]: def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: """Create a mapping of bundle names to their constituent package names.""" logger.info("Mapping bundle names to packages...") - all_paths = glob.glob(os.path.join(SDK_DIR, "*", "*")) - # Exclude temp directories like .tox, .venv, __pycache__, etc. 
- path_lookup = { - os.path.basename(p): p - for p in all_paths - if os.path.isdir(p) and not os.path.basename(p).startswith((".", "__")) - } + path_lookup = _build_package_path_index() bundle_map = {} for package_name in package_names: @@ -235,7 +246,9 @@ def is_stable_on_pypi(package_name: str) -> bool: parsed_version = Version(latest_version) if parsed_version.is_prerelease: - logger.debug(f"Package {package_name} version {latest_version} is pre-release") + logger.debug( + f"Package {package_name} version {latest_version} is pre-release" + ) return False logger.debug(f"Package {package_name} version {latest_version} is stable") From acd4c1f954d7e4e20b4a0777adc85821a1b44caf Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Wed, 4 Feb 2026 14:09:11 -0800 Subject: [PATCH 103/112] minor fix to use service dir for metadata desc --- conda/update_conda_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 84e628b72584..b57b03efaa91 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -450,7 +450,7 @@ def get_package_metadata( else: home_url = f"https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/{service_dir}/{package_name}" - summary = f"Microsoft Azure {package_name.replace('azure-', '').replace('-', ' ').title()} Client Library for Python" + summary = f"Microsoft Azure {service_dir.replace('-', ' ').title()} Client Library for Python" conda_url = f"https://aka.ms/azsdk/conda/releases/{service_dir}" description = ( From 428117f6716feed226b8b4048f80c14cc2ac3299 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 09:45:27 -0800 Subject: [PATCH 104/112] use pypi_tools --- conda/conda_helper_functions.py | 67 +++++++++++++++------------------ 1 file changed, 31 insertions(+), 36 deletions(-) diff --git a/conda/conda_helper_functions.py 
b/conda/conda_helper_functions.py index 1d9c769b5c7c..15078dc126ab 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -13,6 +13,8 @@ from datetime import datetime from ci_tools.parsing import ParsedSetup from packaging.version import Version +from pypi_tools.pypi import PyPIClient, retrieve_versions_from_pypi + ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) SDK_DIR = os.path.join(ROOT_DIR, "sdk") @@ -83,7 +85,6 @@ def get_bundle_name(package_name: str) -> Optional[str]: f"Stable release package {package_name} needs a conda config" ) - # beta or alpha package are not released logger.warning( f"No conda config found for package {package_name}, which may be a pre-release" ) @@ -231,28 +232,25 @@ def package_needs_update( def is_stable_on_pypi(package_name: str) -> bool: """ - Check if a package has a stable (GA) release on PyPI. - - Uses PEP 440 version parsing to determine if the latest version is a pre-release. + Check if a package has any stable (GA) release on PyPI. :param package_name: The name of the package to check. - :return: True if the latest PyPI version is stable, False otherwise. + :return: True if any stable version exists on PyPI, False otherwise. 
""" - pypi_url = f"https://pypi.org/pypi/{package_name}/json" try: - with urllib.request.urlopen(pypi_url, timeout=10) as response: - data = json.loads(response.read().decode("utf-8")) - latest_version = data["info"]["version"] - parsed_version = Version(latest_version) - - if parsed_version.is_prerelease: - logger.debug( - f"Package {package_name} version {latest_version} is pre-release" - ) - return False - - logger.debug(f"Package {package_name} version {latest_version} is stable") - return True + versions = retrieve_versions_from_pypi(package_name) + if not versions: + logger.warning(f"No versions found on PyPI for {package_name}") + return False + + # Check if any version is stable (not a prerelease) + for v in versions: + if not Version(v).is_prerelease: + logger.debug(f"Package {package_name} has stable version {v}") + return True + + logger.debug(f"Package {package_name} has no stable versions") + return False except Exception as e: logger.warning(f"Failed to check PyPI for {package_name}: {e}") @@ -263,25 +261,22 @@ def get_package_data_from_pypi( package_name: str, ) -> Tuple[Optional[str], Optional[str]]: """Fetch the latest version and download URI for a package from PyPI.""" - pypi_url = f"https://pypi.org/pypi/{package_name}/json" try: - with urllib.request.urlopen(pypi_url, timeout=10) as response: - data = json.loads(response.read().decode("utf-8")) - - # Get the latest version - latest_version = data["info"]["version"] - if latest_version in data["releases"] and data["releases"][latest_version]: - # Get the source distribution (sdist) if available - files = data["releases"][latest_version] - source_dist = next( - (f for f in files if f["packagetype"] == "sdist"), None + client = PyPIClient() + data = client.project(package_name) + + # Get the latest version + latest_version = data["info"]["version"] + if latest_version in data["releases"] and data["releases"][latest_version]: + # Get the source distribution (sdist) if available + files = 
data["releases"][latest_version] + source_dist = next((f for f in files if f["packagetype"] == "sdist"), None) + if source_dist: + download_url = source_dist["url"] + logger.info( + f"Found download URL for {package_name}=={latest_version}: {download_url}" ) - if source_dist: - download_url = source_dist["url"] - logger.info( - f"Found download URL for {package_name}=={latest_version}: {download_url}" - ) - return latest_version, download_url + return latest_version, download_url except Exception as e: logger.error(f"Failed to fetch download URI from PyPI for {package_name}: {e}") From ba5ebacf42bbc02a8d492f5f428acf7b7acd85c2 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 10:12:29 -0800 Subject: [PATCH 105/112] simplify bundle map creation, only look at data pkgs --- conda/conda_helper_functions.py | 27 +++++++-------------------- conda/update_conda_files.py | 6 +++--- 2 files changed, 10 insertions(+), 23 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 15078dc126ab..9a88bd43d583 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -99,32 +99,19 @@ def get_bundle_name(package_name: str) -> Optional[str]: def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: """Create a mapping of bundle names to their constituent package names.""" logger.info("Mapping bundle names to packages...") - path_lookup = _build_package_path_index() bundle_map = {} for package_name in package_names: logger.debug(f"Processing package for bundle mapping: {package_name}") - package_path = path_lookup.get(package_name) - if not package_path: - logger.warning(f"Package path not found for {package_name}") + try: + bundle_name = get_bundle_name(package_name) + if bundle_name: + logger.debug(f"Bundle name for package {package_name}: {bundle_name}") + bundle_map.setdefault(bundle_name, []).append(package_name) + except Exception as e: 
+ logger.error(f"Failed to get bundle name for {package_name}: {e}") continue - # Skip directories without pyproject.toml - if not os.path.exists(os.path.join(package_path, "pyproject.toml")): - logger.warning(f"Skipping {package_name}: no pyproject.toml found") - continue - - parsed = ParsedSetup.from_path(package_path) - if not parsed: - logger.error(f"Failed to parse setup for package {package_name}") - continue - - conda_config = parsed.get_conda_config() - if conda_config and "bundle_name" in conda_config: - bundle_name = conda_config["bundle_name"] - logger.debug(f"Bundle name for package {package_name}: {bundle_name}") - bundle_map.setdefault(bundle_name, []).append(package_name) - return bundle_map diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index b57b03efaa91..cf3ee2e3eb46 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -1038,9 +1038,9 @@ def update_mgmt_plane_release_log( package_dict, outdated_package_names, new_data_plane_names, new_mgmt_plane_names ) - # pre-process bundled packages to minimize file writes for new data plane packages, - # and release logs - bundle_map = map_bundle_to_packages(list(package_dict.keys())) + # pre-process bundled data packages to minimize file writes for new data plane packages, + # and release logs (mgmt packages are always bundled together) + bundle_map = map_bundle_to_packages([pkg.get(PACKAGE_COL, "") for pkg in data_pkgs]) logger.info( f"Identified {len(bundle_map)} release bundles from package data: {bundle_map}" ) From fba33def58e6363b3839d1558873c79dc066ee6c Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 10:23:46 -0800 Subject: [PATCH 106/112] actually catch pkgs w/o conda config when making the map --- conda/conda_helper_functions.py | 13 ++++++++++--- conda/update_conda_files.py | 11 ++++++++++- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/conda/conda_helper_functions.py 
b/conda/conda_helper_functions.py index 9a88bd43d583..4efccb3b5d3d 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -96,11 +96,17 @@ def get_bundle_name(package_name: str) -> Optional[str]: return None -def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: - """Create a mapping of bundle names to their constituent package names.""" +def map_bundle_to_packages( + package_names: List[str], +) -> Tuple[Dict[str, List[str]], List[str]]: + """Create a mapping of bundle names to their constituent package names. + + :return: Tuple of (bundle_map, failed_packages) where failed_packages are packages that threw exceptions. + """ logger.info("Mapping bundle names to packages...") bundle_map = {} + failed_packages = [] for package_name in package_names: logger.debug(f"Processing package for bundle mapping: {package_name}") try: @@ -110,9 +116,10 @@ def map_bundle_to_packages(package_names: List[str]) -> Dict[str, List[str]]: bundle_map.setdefault(bundle_name, []).append(package_name) except Exception as e: logger.error(f"Failed to get bundle name for {package_name}: {e}") + failed_packages.append(package_name) continue - return bundle_map + return bundle_map, failed_packages # ===================================== diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index cf3ee2e3eb46..42b6426f1d67 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -1040,7 +1040,9 @@ def update_mgmt_plane_release_log( # pre-process bundled data packages to minimize file writes for new data plane packages, # and release logs (mgmt packages are always bundled together) - bundle_map = map_bundle_to_packages([pkg.get(PACKAGE_COL, "") for pkg in data_pkgs]) + bundle_map, bundle_failed_pkgs = map_bundle_to_packages( + [pkg.get(PACKAGE_COL, "") for pkg in data_pkgs] + ) logger.info( f"Identified {len(bundle_map)} release bundles from package data: {bundle_map}" ) @@ -1101,6 +1103,13 @@ def 
update_mgmt_plane_release_log( for pkg_name in mgmt_plane_release_log_results: print(f"- {pkg_name}") + if bundle_failed_pkgs: + print( + "\nThe following packages errored when constructing bundle map, they may need a [tool.azure-sdk-conda] section in their pyproject.toml for proper release grouping, and may have been improperly individually processed." + ) + for pkg_name in bundle_failed_pkgs: + print(f"- {pkg_name}") + if len(new_data_plane_names) > 0: print("\n=== Manual Steps for New Data Plane Packages ===") print( From 887786e397a9a917a275cef798af49a7274b1bd7 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 11:00:52 -0800 Subject: [PATCH 107/112] use namespace for module name instead of naive parsing --- conda/conda_helper_functions.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 4efccb3b5d3d..f3ec8a42d710 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -294,24 +294,28 @@ def get_valid_package_imports(package_name: str) -> List[str]: """ Inspect the package's actual module structure and return only valid imports. - This avoids assuming all packages have .aio, .aio.operations, .models, and .operations - submodules, since not all packages have the same structure. - :param package_name: The name of the package (e.g., "azure-mgmt-advisor" or "azure-eventgrid"). :return: List of valid module names for import (e.g., ["azure.eventgrid", "azure.eventgrid.aio"]). 
""" - module_name = package_name.replace("-", ".") - imports = [module_name] - package_path = get_package_path(package_name) if not package_path: logger.warning( - f"Could not find package path for {package_name}, using base import only" + f"Could not find package path for {package_name} to determine imports, using fallback" ) - return imports + return [package_name.replace("-", ".")] + else: + parsed = ParsedSetup.from_path(package_path) + if not parsed or not parsed.namespace: + logger.warning( + f"Could not parse namespace for {package_name}, using fallback" + ) + module_name = package_name.replace("-", ".") + else: + module_name = parsed.namespace + + imports = [module_name] # Construct the path to the actual module directory - # e.g., azure-mgmt-advisor -> azure/mgmt/advisor module_parts = module_name.split(".") module_dir = os.path.join(package_path, *module_parts) From 3b9e1d680e3b1251fabf6a2ed3a9dee3abd7d39b Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 12:30:06 -0800 Subject: [PATCH 108/112] error if somehow someway version is ever missing from processed pkgs --- conda/update_conda_files.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 42b6426f1d67..9a56c564551c 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -733,7 +733,8 @@ def update_data_plane_release_logs( if version: pkg_updates.append(f"- {pkg_name}-{version}") else: - logger.warning( + # shouldn't happen, but fallback + logger.error( f"Package {pkg_name} in group {curr_service_name} is missing version info, it may be deprecated. 
Skipping in release log update" ) result.append(pkg_name) @@ -742,7 +743,7 @@ def update_data_plane_release_logs( if curr_service_name in PACKAGES_WITH_DOWNLOAD_URI: version = PACKAGES_WITH_DOWNLOAD_URI[curr_service_name] if not version: - logger.warning( + logger.error( f"Package {curr_service_name} with download_uri is missing version info, it may be deprecated. Skipping in release log update" ) result.append(curr_service_name) @@ -754,7 +755,7 @@ def update_data_plane_release_logs( if version: pkg_updates.append(f"- {curr_service_name}-{version}") else: - logger.warning( + logger.error( f"Package {curr_service_name} is missing version info, it may be deprecated. Skipping in release log update" ) result.append(curr_service_name) From 1b194f1ab2a17ac4ea304f6c15fdf34df66b909f Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 12:50:06 -0800 Subject: [PATCH 109/112] never allow null pkg_path for getting service_dir metadata --- conda/conda_helper_functions.py | 2 +- conda/update_conda_files.py | 27 ++++++++++++++++++++++----- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index f3ec8a42d710..0ffb5adf630a 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -56,7 +56,7 @@ def get_package_path(package_name: str) -> Optional[str]: path_index = _build_package_path_index() package_path = path_index.get(package_name) if not package_path: - logger.error(f"Package path not found for package: {package_name}") + logger.warning(f"Package path not found for package: {package_name}") return None return package_path diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 9a56c564551c..f593b39cdb34 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -429,7 +429,7 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] def 
get_package_metadata( - package_name: str, package_path: Optional[str], is_bundle: bool = False + package_name: str, package_path: str, is_bundle: bool = False ) -> Tuple[str, str, str]: """Extract package metadata for about section in meta.yaml. @@ -437,10 +437,7 @@ def get_package_metadata( :param package_path: The filesystem path to the package. :param is_bundle: Whether this is a release bundle (affects URL structure). """ - if package_path: - service_dir = os.path.basename(os.path.dirname(package_path)) - else: - service_dir = package_name.replace("azure-", "") + service_dir = os.path.basename(os.path.dirname(package_path)) # For bundles, URL points to service directory; for individual packages, include package name if is_bundle: @@ -498,12 +495,25 @@ def generate_data_plane_meta_yaml( pkg_imports = list(set(pkg_imports)) # deduplicate package_path = get_package_path(bundle_map[bundle_name][0]) + + if not package_path: + logger.error( + f"Could not find package path for {bundle_name} to extract metadata, skipping meta.yaml generation" + ) + return "" + home_url, summary, description = get_package_metadata( bundle_name, package_path, is_bundle=True ) else: logger.info(f"Generating meta.yaml for package {package_name}") package_path = get_package_path(package_name) + + if not package_path: + logger.error( + f"Could not find package path for {package_name} to extract metadata, skipping meta.yaml generation" + ) + return "" parsed_setup = ParsedSetup.from_path(package_path) host_reqs, run_reqs = get_package_requirements(parsed_setup) @@ -602,6 +612,13 @@ def add_new_data_plane_packages( result.append(package_name) continue + if not meta_yml: + logger.error( + f"Meta.yaml content for {package_name} is empty, skipping file creation" + ) + result.append(package_name) + continue + try: with open(pkg_yaml_path, "w") as f: f.write(meta_yml) From 93e015d21f5bd085ac5706821a1efd98a7e48c37 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> 
Date: Thu, 5 Feb 2026 14:32:10 -0800 Subject: [PATCH 110/112] trim essential reqs list --- conda/update_conda_files.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index f593b39cdb34..8a435a20d6e8 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -396,18 +396,11 @@ def format_requirement(req: str) -> str: def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]]: """Retrieve the host and run requirements for a data plane package meta.yaml.""" - host_requirements = set(["pip"]) - run_requirements = set() + host_requirements = set(["pip", "python"]) + run_requirements = set(["python"]) # reqs commonly seen in existing meta.yaml files that aren't always in setup.py or pyproject.toml - for essential_req in [ - "azure-identity", - "azure-core", - "python", - "aiohttp", - "requests-oauthlib >=0.5.0", - "cryptography", - ]: + for essential_req in ["azure-identity", "azure-core", "aiohttp"]: req_name = format_requirement(essential_req) host_requirements.add(req_name) run_requirements.add(req_name) From d389be31591d5f073fc8217b5dffe332d5d3e28d Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 15:01:40 -0800 Subject: [PATCH 111/112] dont need to import types --- conda/conda_helper_functions.py | 25 +++++++++--------- conda/update_conda_files.py | 46 ++++++++++++++++----------------- 2 files changed, 35 insertions(+), 36 deletions(-) diff --git a/conda/conda_helper_functions.py b/conda/conda_helper_functions.py index 0ffb5adf630a..2b9e0b2d8e35 100644 --- a/conda/conda_helper_functions.py +++ b/conda/conda_helper_functions.py @@ -5,9 +5,8 @@ import os import glob from functools import lru_cache -from typing import Dict, List, Optional, Tuple +from typing import Optional import csv -import json from ci_tools.logging import logger import urllib.request from datetime import 
datetime @@ -36,7 +35,7 @@ @lru_cache(maxsize=None) -def _build_package_path_index() -> Dict[str, str]: +def _build_package_path_index() -> dict[str, str]: """ Build a one-time index mapping package names to their filesystem paths. @@ -97,8 +96,8 @@ def get_bundle_name(package_name: str) -> Optional[str]: def map_bundle_to_packages( - package_names: List[str], -) -> Tuple[Dict[str, List[str]], List[str]]: + package_names: list[str], +) -> tuple[dict[str, list[str]], list[str]]: """Create a mapping of bundle names to their constituent package names. :return: Tuple of (bundle_map, failed_packages) where failed_packages are packages that threw exceptions. @@ -127,7 +126,7 @@ def map_bundle_to_packages( # ===================================== -def parse_csv() -> List[Dict[str, str]]: +def parse_csv() -> list[dict[str, str]]: """Download and parse the Azure SDK Python packages CSV file.""" try: logger.info(f"Downloading CSV from {AZURE_SDK_CSV_URL}") @@ -148,7 +147,7 @@ def parse_csv() -> List[Dict[str, str]]: return [] -def is_mgmt_package(pkg: Dict[str, str]) -> bool: +def is_mgmt_package(pkg: dict[str, str]) -> bool: pkg_name = pkg.get(PACKAGE_COL, "") _type = pkg.get(TYPE_COL, "") if _type == "mgmt": @@ -162,8 +161,8 @@ def is_mgmt_package(pkg: Dict[str, str]) -> bool: def separate_packages_by_type( - packages: List[Dict[str, str]], -) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]: + packages: list[dict[str, str]], +) -> tuple[list[dict[str, str]], list[dict[str, str]]]: """Separate packages into data plane and management plane libraries.""" data_plane_packages = [] mgmt_plane_packages = [] @@ -182,7 +181,7 @@ def separate_packages_by_type( def package_needs_update( - package_row: Dict[str, str], prev_release_date: str, is_new=False + package_row: dict[str, str], prev_release_date: str, is_new=False ) -> bool: """ Check if the package is new or needs version update (i.e., FirstGADate or LatestGADate is after the last release). 
@@ -253,7 +252,7 @@ def is_stable_on_pypi(package_name: str) -> bool: def get_package_data_from_pypi( package_name: str, -) -> Tuple[Optional[str], Optional[str]]: +) -> tuple[Optional[str], Optional[str]]: """Fetch the latest version and download URI for a package from PyPI.""" try: client = PyPIClient() @@ -277,7 +276,7 @@ def get_package_data_from_pypi( return None, None -def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int]]: +def build_package_index(conda_artifacts: list[dict]) -> dict[str, tuple[int, int]]: """Build an index of package name -> (artifact_idx, checkout_idx) for fast lookups in conda-sdk-client.yml.""" package_index = {} @@ -290,7 +289,7 @@ def build_package_index(conda_artifacts: List[Dict]) -> Dict[str, Tuple[int, int return package_index -def get_valid_package_imports(package_name: str) -> List[str]: +def get_valid_package_imports(package_name: str) -> list[str]: """ Inspect the package's actual module structure and return only valid imports. diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index 8a435a20d6e8..a22533be1b77 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -9,7 +9,7 @@ from dateutil.relativedelta import relativedelta from ci_tools.logging import logger, configure_logging from ci_tools.parsing import ParsedSetup -from typing import Dict, List, Optional, Tuple +from typing import Optional from conda_helper_functions import ( parse_csv, separate_packages_by_type, @@ -65,7 +65,7 @@ def quoted_presenter(dumper, data): def update_conda_version( target_release_date: Optional[datetime] = None, -) -> Tuple[datetime, str]: +) -> tuple[datetime, str]: """Update the AZURESDK_CONDA_VERSION in conda_env.yml and return the old and new versions. 
Args: @@ -111,11 +111,11 @@ def increase_indent(self, flow=False, indentless=False): def update_conda_sdk_client_yml( - package_dict: Dict[str, Dict[str, str]], - packages_to_update: List[str], - new_data_plane_packages: List[str], - new_mgmt_plane_packages: List[str], -) -> List[str]: + package_dict: dict[str, dict[str, str]], + packages_to_update: list[str], + new_data_plane_packages: list[str], + new_mgmt_plane_packages: list[str], +) -> list[str]: """ Update outdated package versions and add new entries in conda-sdk-client.yml file @@ -358,8 +358,8 @@ def update_conda_sdk_client_yml( def determine_service_info( - pkg: Dict[str, str], bundle_name: Optional[str] -) -> Tuple[str, str]: + pkg: dict[str, str], bundle_name: Optional[str] +) -> tuple[str, str]: """ Returns the common root and service name for the given package. @@ -394,7 +394,7 @@ def format_requirement(req: str) -> str: return req -def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]]: +def get_package_requirements(parsed: ParsedSetup) -> tuple[list[str], list[str]]: """Retrieve the host and run requirements for a data plane package meta.yaml.""" host_requirements = set(["pip", "python"]) run_requirements = set(["python"]) @@ -423,7 +423,7 @@ def get_package_requirements(parsed: ParsedSetup) -> Tuple[List[str], List[str]] def get_package_metadata( package_name: str, package_path: str, is_bundle: bool = False -) -> Tuple[str, str, str]: +) -> tuple[str, str, str]: """Extract package metadata for about section in meta.yaml. :param package_name: The name of the package or bundle. 
@@ -451,7 +451,7 @@ def get_package_metadata( def generate_data_plane_meta_yaml( - bundle_map: Dict[str, List[str]], + bundle_map: dict[str, list[str]], package_name: str, bundle_name: Optional[str], ) -> str: @@ -564,9 +564,9 @@ def generate_data_plane_meta_yaml( def add_new_data_plane_packages( - bundle_map: Dict[str, List[str]], - new_data_plane_names: List[str], -) -> List[str]: + bundle_map: dict[str, list[str]], + new_data_plane_names: list[str], +) -> list[str]: """Create meta.yaml files for new data plane packages and add import tests.""" if len(new_data_plane_names) == 0: return [] @@ -628,7 +628,7 @@ def add_new_data_plane_packages( # ===================================== -def add_new_mgmt_plane_packages(new_mgmt_plane_names: List[str]) -> List[str]: +def add_new_mgmt_plane_packages(new_mgmt_plane_names: list[str]) -> list[str]: """Update azure-mgmt/meta.yaml with new management libraries, and add import tests.""" if len(new_mgmt_plane_names) == 0: return [] @@ -704,11 +704,11 @@ def add_new_mgmt_plane_packages(new_mgmt_plane_names: List[str]) -> List[str]: def update_data_plane_release_logs( - package_dict: Dict, - bundle_map: Dict[str, List[str]], - new_data_plane_names: List[str], + package_dict: dict, + bundle_map: dict[str, list[str]], + new_data_plane_names: list[str], release_date: str, -) -> List[str]: +) -> list[str]: """ Add and update release logs for data plane conda packages. Release log includes versions of all packages for the release """ @@ -872,10 +872,10 @@ def update_data_plane_release_logs( def update_mgmt_plane_release_log( - package_dict: Dict, - all_mgmt_plane_names: List[str], + package_dict: dict, + all_mgmt_plane_names: list[str], release_date: str, -) -> List[str]: +) -> list[str]: """ Update azure-mgmt release log. 
""" From d63024d7828493f502872263614c0b19118dd927 Mon Sep 17 00:00:00 2001 From: jennypng <63012604+JennyPng@users.noreply.github.com> Date: Thu, 5 Feb 2026 15:19:52 -0800 Subject: [PATCH 112/112] trim again --- conda/update_conda_files.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/update_conda_files.py b/conda/update_conda_files.py index a22533be1b77..417f45bcbf0b 100644 --- a/conda/update_conda_files.py +++ b/conda/update_conda_files.py @@ -400,7 +400,7 @@ def get_package_requirements(parsed: ParsedSetup) -> tuple[list[str], list[str]] run_requirements = set(["python"]) # reqs commonly seen in existing meta.yaml files that aren't always in setup.py or pyproject.toml - for essential_req in ["azure-identity", "azure-core", "aiohttp"]: + for essential_req in ["azure-core", "aiohttp"]: req_name = format_requirement(essential_req) host_requirements.add(req_name) run_requirements.add(req_name)