| 1 | +import networkx as nx |
| 2 | +import yaml |
| 3 | +import re |
| 4 | +import glob |
| 5 | +import sys, os |
| 6 | +import textwrap |
| 7 | +import argparse |
| 8 | +from distutils.dir_util import copy_tree |
| 9 | +import requests |
| 10 | + |
| 11 | + |
| 12 | +class folded_unicode(str): |
| 13 | + pass |
| 14 | + |
| 15 | + |
| 16 | +class literal_unicode(str): |
| 17 | + pass |
| 18 | + |
| 19 | + |
| 20 | +def folded_unicode_representer(dumper, data): |
| 21 | + return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=">") |
| 22 | + |
| 23 | + |
| 24 | +def literal_unicode_representer(dumper, data): |
| 25 | + return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|") |
| 26 | + |
| 27 | + |
| 28 | +yaml.add_representer(folded_unicode, folded_unicode_representer) |
| 29 | +yaml.add_representer(literal_unicode, literal_unicode_representer) |
| 30 | + |
| 31 | + |
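| | +# CI scripts below are wrapped in literal_unicode so that the representer registered above emits them as YAML literal blocks. |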
| 32 | +azure_linux_script = literal_unicode("""\ |
| 33 | +export CI=azure |
| 34 | +export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME |
| 35 | +export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) |
| 36 | +.scripts/run_docker_build.sh""") |
| 37 | + |
| 38 | +azure_osx_script = literal_unicode(r"""\ |
| 39 | +export CI=azure |
| 40 | +export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME |
| 41 | +export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) |
| 42 | +.scripts/build_osx.sh""") |
| 43 | + |
| 44 | +azure_osx_arm64_script = literal_unicode(r"""\ |
| 45 | +export CI=azure |
| 46 | +export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME |
| 47 | +export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) |
| 48 | +.scripts/build_osx_arm64.sh""") |
| 49 | + |
| 50 | +azure_win_preconfig_script = literal_unicode("""\ |
| 51 | +set "CI=azure" |
| 52 | +call %CONDA%\\condabin\\conda_hook.bat |
| 53 | +call %CONDA%\\condabin\\conda.bat activate base |
| 54 | + |
| 55 | +:: 2 cores available on Appveyor workers: https://www.appveyor.com/docs/build-environment/#build-vm-configurations |
| 56 | +:: CPU_COUNT is passed through conda build: https://github.com/conda/conda-build/pull/1149 |
| 57 | +set CPU_COUNT=2 |
| 58 | + |
| 59 | +set PYTHONUNBUFFERED=1 |
| 60 | + |
| 61 | +conda config --set show_channel_urls true |
| 62 | +conda config --set auto_update_conda false |
| 63 | +conda config --set add_pip_as_python_dependency false |
| 64 | + |
| 65 | +call setup_x64 |
| 66 | + |
| 67 | +:: Set the conda-build working directory to a smaller path |
| 68 | +if "%CONDA_BLD_PATH%" == "" ( |
| 69 | + set "CONDA_BLD_PATH=C:\\bld\\" |
| 70 | +) |
| 71 | + |
| 72 | +:: Remove some directories from PATH |
| 73 | +set "PATH=%PATH:C:\\ProgramData\\Chocolatey\\bin;=%" |
| 74 | +set "PATH=%PATH:C:\\Program Files (x86)\\sbt\\bin;=%" |
| 75 | +set "PATH=%PATH:C:\\Rust\\.cargo\\bin;=%" |
| 76 | +set "PATH=%PATH:C:\\Program Files\\Git\\usr\\bin;=%" |
| 77 | +set "PATH=%PATH:C:\\Program Files\\Git\\cmd;=%" |
| 78 | +set "PATH=%PATH:C:\\Program Files\\Git\\mingw64\\bin;=%" |
| 79 | +set "PATH=%PATH:C:\\Program Files (x86)\\Subversion\\bin;=%" |
| 80 | +set "PATH=%PATH:C:\\Program Files\\CMake\\bin;=%" |
| 81 | +set "PATH=%PATH:C:\\Program Files\\OpenSSL\\bin;=%" |
| 82 | +set "PATH=%PATH:C:\\Strawberry\\c\\bin;=%" |
| 83 | +set "PATH=%PATH:C:\\Strawberry\\perl\\bin;=%" |
| 84 | +set "PATH=%PATH:C:\\Strawberry\\perl\\site\\bin;=%" |
| 85 | +set "PATH=%PATH:c:\\tools\\php;=%" |
| 86 | + |
| 87 | +:: On azure, there are libcrypto*.dll & libssl*.dll under |
| 88 | +:: C:\\Windows\\System32, which should not be there (no vendor dlls in windows folder). |
| 89 | +:: They would be found before the openssl libs of the conda environment, so we delete them. |
| 90 | +if defined CI ( |
| 91 | + DEL C:\\Windows\\System32\\libcrypto-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libcrypto-1_1-x64.dll) |
| 92 | + DEL C:\\Windows\\System32\\libssl-1_1-x64.dll || (Echo Ignoring failure to delete C:\\Windows\\System32\\libssl-1_1-x64.dll) |
| 93 | +) |
| 94 | + |
| 95 | +:: Make paths like C:\\hostedtoolcache\\windows\\Ruby\\2.5.7\\x64\\bin garbage |
| 96 | +set "PATH=%PATH:ostedtoolcache=%" |
| 97 | + |
| 98 | +mkdir "%CONDA%\\etc\\conda\\activate.d" |
| 99 | + |
| 100 | +echo set "CONDA_BLD_PATH=%CONDA_BLD_PATH%" > "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" |
| 101 | +echo set "CPU_COUNT=%CPU_COUNT%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" |
| 102 | +echo set "PYTHONUNBUFFERED=%PYTHONUNBUFFERED%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" |
| 103 | +echo set "PATH=%PATH%" >> "%CONDA%\\etc\\conda\\activate.d\\conda-forge-ci-setup-activate.bat" |
| 104 | + |
| 105 | +conda info |
| 106 | +conda config --show-sources |
| 107 | +conda list --show-channel-urls |
| 108 | +""") |
| 109 | + |
| 110 | +azure_win_script = literal_unicode("""\ |
| 111 | +setlocal EnableExtensions EnableDelayedExpansion |
| 112 | +call %CONDA%\\condabin\\conda_hook.bat |
| 113 | +call %CONDA%\\condabin\\conda.bat activate base |
| 114 | + |
| 115 | +set "FEEDSTOCK_ROOT=%cd%" |
| 116 | + |
| 117 | +call conda config --append channels defaults |
| 118 | +call conda config --add channels conda-forge |
| 119 | +call conda config --add channels robostack |
| 120 | +call conda config --set channel_priority strict |
| 121 | + |
| 122 | +:: conda remove --force m2-git |
| 123 | + |
| 124 | +C:\\Miniconda\\python.exe -m pip install git+https://github.com/mamba-org/boa.git@master |
| 125 | +if errorlevel 1 exit 1 |
| 126 | + |
| 127 | +for %%X in (%CURRENT_RECIPES%) do ( |
| 128 | + echo "BUILDING RECIPE %%X" |
| 129 | + cd %FEEDSTOCK_ROOT%\\recipes\\%%X\\ |
| 130 | + copy %FEEDSTOCK_ROOT%\\conda_build_config.yaml .\\conda_build_config.yaml |
| 131 | + boa build . |
| 132 | + if errorlevel 1 exit 1 |
| 133 | +) |
| 134 | + |
| 135 | +anaconda -t %ANACONDA_API_TOKEN% upload "C:\\bld\\win-64\\*.tar.bz2" --force |
| 136 | +if errorlevel 1 exit 1 |
| 137 | +""") |
| 138 | + |
| 139 | +parsed_args = None |
| 140 | + |
| 141 | + |
| 142 | +def parse_command_line(argv): |
| 143 | + parser = argparse.ArgumentParser( |
| 144 | + description="Conda recipe Azure pipeline generator for ROS packages" |
| 145 | + ) |
| 146 | + |
| 147 | + default_dir = "./recipes" |
| 148 | + parser.add_argument( |
| 149 | + "-d", |
| 150 | + "--dir", |
| 151 | + dest="dir", |
| 152 | + default=default_dir, |
| 153 | + help="The recipes directory to process (default: {}).".format(default_dir), |
| 154 | + ) |
| 155 | + |
| 156 | + parser.add_argument( |
| 157 | + "-t", "--trigger-branch", dest="trigger_branch", help="Trigger branch for Azure" |
| 158 | + ) |
| 159 | + |
| 160 | + parser.add_argument( |
| 161 | + "-p", |
| 162 | + "--platform", |
| 163 | + dest="platform", |
| 164 | + default="linux-64", |
| 165 | + help="Platform to emit build pipeline for", |
| 166 | + ) |
| 167 | + |
| 168 | + parser.add_argument( |
| 169 | +        "-a", "--additional-recipes", action="store_true", help="Also process recipes found in the additional_recipes folder") |
| 170 | + |
| 171 | + arguments = parser.parse_args(argv[1:]) |
| 172 | + global parsed_args |
| 173 | + parsed_args = arguments |
| 174 | + return arguments |
| 175 | + |
| 176 | + |
| 177 | +def normalize_name(s): |
| 178 | + s = s.replace("-", "_") |
| 179 | + return re.sub("[^a-zA-Z0-9_]+", "", s) |
| 180 | + |
| 181 | + |
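| | +# Merge consecutive dependency stages into batches of at most max_batch_size packages to cut down the number of Azure jobs. |
| | +# Packages listed under "build_in_own_azure_stage" in vinca.yaml are always emitted as a stage of their own. |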
| 182 | +def batch_stages(stages, max_batch_size=5): |
| 183 | + with open("vinca.yaml", "r") as vinca_yaml: |
| 184 | + vinca_conf = yaml.safe_load(vinca_yaml) |
| 185 | + |
| 186 | + # this reduces the number of individual builds to try to save some time |
| 187 | + stage_lengths = [len(s) for s in stages] |
| 188 | + merged_stages = [] |
| 189 | + curr_stage = [] |
| 190 | + build_individually = vinca_conf.get("build_in_own_azure_stage", []) |
| 191 | + |
| 192 | + def chunks(lst, n): |
| 193 | + """Yield successive n-sized chunks from lst.""" |
| 194 | + for i in range(0, len(lst), n): |
| 195 | + yield lst[i:i + n] |
| 196 | + i = 0 |
| 197 | + while i < len(stages): |
| 198 | + for build_individually_pkg in build_individually: |
| 199 | + if build_individually_pkg in stages[i]: |
| 200 | + merged_stages.append([[build_individually_pkg]]) |
| 201 | + stages[i].remove(build_individually_pkg) |
| 202 | + |
| 203 | + if stage_lengths[i] < max_batch_size and len(curr_stage) + stage_lengths[i] < max_batch_size: |
| 204 | + # merge with previous stage |
| 205 | + curr_stage += stages[i] |
| 206 | + else: |
| 207 | + if len(curr_stage): |
| 208 | + merged_stages.append([curr_stage]) |
| 209 | + curr_stage = [] |
| 210 | + if stage_lengths[i] < max_batch_size: |
| 211 | + curr_stage += stages[i] |
| 212 | + else: |
| 213 | + # split this stage into multiple |
| 214 | + merged_stages.append(list(chunks(stages[i], max_batch_size))) |
| 215 | + i += 1 |
| 216 | + if len(curr_stage): |
| 217 | + merged_stages.append([curr_stage]) |
| 218 | + return merged_stages |
| 219 | + |
| 220 | + |
| 221 | + |
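| | +# Load repodata.json from every channel URL or local path listed under "skip_existing" in vinca.yaml, |
| | +# so callers can skip packages that are already published. |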
| 222 | +def get_skip_existing(vinca_conf, platform): |
| 223 | + fn = vinca_conf.get("skip_existing") |
| 224 | + repodatas = [] |
| 225 | + if fn is not None: |
| 226 | + fns = list(fn) |
| 227 | + else: |
| 228 | + fns = [] |
| 229 | + for fn in fns: |
| 230 | + selected_bn = None |
| 231 | + if "://" in fn: |
| 232 | + fn += f"{platform}/repodata.json" |
| 233 | + print(f"Fetching repodata: {fn}") |
| 234 | + request = requests.get(fn) |
| 235 | + |
| 236 | + repodata = request.json() |
| 237 | + repodatas.append(repodata) |
| 238 | + else: |
| 239 | + import json |
| 240 | + with open(fn) as fi: |
| 241 | + repodata = json.load(fi) |
| 242 | + repodatas.append(repodata) |
| 243 | + |
| 244 | + return repodatas |
| 245 | + |
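| | +# Copy each recipe found in ../additional_recipes into the recipes directory, unless a package with the |
| | +# same name, version and build number already exists in the "skip_existing" repodata. |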
| 246 | +def add_additional_recipes(args): |
| 247 | + additional_recipes_path = os.path.abspath(os.path.join(args.dir, '..', 'additional_recipes')) |
| 248 | + |
| 249 | + print("Searching additional recipes in ", additional_recipes_path) |
| 250 | + |
| 251 | + if not os.path.exists(additional_recipes_path): |
| 252 | + return |
| 253 | + |
| 254 | + with open("vinca.yaml", "r") as vinca_yaml: |
| 255 | + vinca_conf = yaml.safe_load(vinca_yaml) |
| 256 | + |
| 257 | + repodatas = get_skip_existing(vinca_conf, args.platform) |
| 258 | + |
| 259 | + for recipe_path in glob.glob(additional_recipes_path + '/**/recipe.yaml'): |
| 260 | + with open(recipe_path) as recipe: |
| 261 | + additional_recipe = yaml.safe_load(recipe) |
| 262 | + |
| 263 | + name, version, bnumber = (additional_recipe["package"]["name"], additional_recipe["package"]["version"], additional_recipe["build"]["number"]) |
| 264 | + print("Checking if ", name, version, bnumber, " exists") |
| 265 | + skip = False |
| 266 | + for repo in repodatas: |
| 267 | + for key, pkg in repo.get("packages", {}).items(): |
| 268 | + if pkg["name"] == name and pkg["version"] == version and pkg["build_number"] == bnumber: |
| 269 | + skip = True |
| 270 | + print(f"{name}=={version}=={bnumber} already exists. Skipping.") |
| 271 | + break |
| 272 | + |
| 273 | + if not skip: |
| 274 | + print("Adding ", os.path.dirname(recipe_path)) |
| 275 | + goal_folder = os.path.join(args.dir, name) |
| 276 | + os.makedirs(goal_folder, exist_ok=True) |
| 277 | + copy_tree(os.path.dirname(recipe_path), goal_folder) |
| 278 | + |
| 279 | + |
| 280 | +def main(): |
| 281 | + |
| 282 | + args = parse_command_line(sys.argv) |
| 283 | + |
| 284 | + metas = [] |
| 285 | + |
| 286 | + if args.additional_recipes: |
| 287 | + add_additional_recipes(args) |
| 288 | + |
| 289 | + if not os.path.exists(args.dir): |
| 290 | + print(f"{args.dir} not found. Not generating a pipeline.") |
| 291 | + |
| 292 | + all_recipes = glob.glob(os.path.join(args.dir, "**", "*.yaml")) |
| 293 | + for f in all_recipes: |
| 294 | + with open(f) as fi: |
| 295 | + metas.append(yaml.safe_load(fi.read())) |
| 296 | + |
| 297 | + if len(metas) >= 1: |
| 298 | + requirements = {} |
| 299 | + |
| 300 | + for pkg in metas: |
| 301 | + requirements[pkg["package"]["name"]] = ( |
| 302 | + pkg["requirements"].get("host", []) + pkg["requirements"].get("run", []) |
| 303 | + ) |
| 304 | + |
| 305 | +        # reduce each requirement to its bare package name (drop version constraints) |
| 306 | + for pkg_name, reqs in requirements.items(): |
| 307 | + requirements[pkg_name] = [ |
| 308 | + r.split()[0] for r in reqs if (isinstance(r, str) and r in reqs) |
| 309 | + ] |
| 310 | + print(requirements) |
| 311 | + |
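| | +        # Build the dependency graph: one node per package and an edge to every ros-* requirement. |
| | +        # The reversed topological sort below yields a build order with dependencies before dependents. |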
| 312 | + G = nx.DiGraph() |
| 313 | + for pkg, reqs in requirements.items(): |
| 314 | + G.add_node(pkg) |
| 315 | + for r in reqs: |
| 316 | + if r.startswith("ros-"): |
| 317 | + G.add_edge(pkg, r) |
| 318 | + |
| 319 | + import matplotlib.pyplot as plt |
| 320 | + from networkx.drawing.nx_agraph import write_dot |
| 321 | + |
| 322 | + nx.draw(G, with_labels=True, font_weight='bold') |
| 323 | + plt.show() |
| 324 | + |
| 325 | + write_dot(G, "grid.dot") |
| 326 | + |
| 327 | + tg = list(reversed(list(nx.topological_sort(G)))) |
| 328 | + |
| 329 | + stages = [] |
| 330 | + current_stage = [] |
| 331 | + for pkg in tg: |
| 332 | + reqs = requirements.get(pkg, []) |
| 333 | + sort_in_stage = 0 |
| 334 | + for r in reqs: |
| 335 | +            # push the target stage index past every stage that already provides this requirement |
| 336 | + for sidx, stage in enumerate(stages): |
| 337 | + if r in stages[sidx]: |
| 338 | + sort_in_stage = max(sidx + 1, sort_in_stage) |
| 339 | + |
| 340 | + # if r in current_stage: |
| 341 | + # stages.append(current_stage) |
| 342 | + # current_stage = [] |
| 343 | + if sort_in_stage >= len(stages): |
| 344 | + stages.append([pkg]) |
| 345 | + else: |
| 346 | + stages[sort_in_stage].append(pkg) |
| 347 | + # current_stage.append(pkg) |
| 348 | + |
| 349 | + if len(current_stage): |
| 350 | + stages.append(current_stage) |