From beb49282e9d4260790fad74335af00672a7bd97b Mon Sep 17 00:00:00 2001 From: Techassi Date: Tue, 12 Aug 2025 15:46:07 +0200 Subject: [PATCH 1/9] feat: Add boil tool --- Cargo.lock | 319 ++++++++++++++++++--- Cargo.toml | 16 +- boil.toml | 5 + rust/boil/Cargo.toml | 19 ++ rust/boil/README.md | 31 ++ rust/boil/src/build/bakefile.rs | 478 +++++++++++++++++++++++++++++++ rust/boil/src/build/cli.rs | 122 ++++++++ rust/boil/src/build/docker.rs | 156 ++++++++++ rust/boil/src/build/image.rs | 163 +++++++++++ rust/boil/src/build/mod.rs | 105 +++++++ rust/boil/src/build/platform.rs | 67 +++++ rust/boil/src/cli.rs | 89 ++++++ rust/boil/src/completions/mod.rs | 10 + rust/boil/src/config.rs | 26 ++ rust/boil/src/main.rs | 94 ++++++ rust/boil/src/show/images.rs | 27 ++ rust/boil/src/show/mod.rs | 1 + rust/boil/src/utils.rs | 23 ++ 18 files changed, 1708 insertions(+), 43 deletions(-) create mode 100644 boil.toml create mode 100644 rust/boil/Cargo.toml create mode 100644 rust/boil/README.md create mode 100644 rust/boil/src/build/bakefile.rs create mode 100644 rust/boil/src/build/cli.rs create mode 100644 rust/boil/src/build/docker.rs create mode 100644 rust/boil/src/build/image.rs create mode 100644 rust/boil/src/build/mod.rs create mode 100644 rust/boil/src/build/platform.rs create mode 100644 rust/boil/src/cli.rs create mode 100644 rust/boil/src/completions/mod.rs create mode 100644 rust/boil/src/config.rs create mode 100644 rust/boil/src/main.rs create mode 100644 rust/boil/src/show/images.rs create mode 100644 rust/boil/src/show/mod.rs create mode 100644 rust/boil/src/utils.rs diff --git a/Cargo.lock b/Cargo.lock index 2b20caacd..5181b2141 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,21 @@ # It is not intended for manual editing. 
version = 4 +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + [[package]] name = "aho-corasick" version = "1.1.3" @@ -47,7 +62,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -58,7 +73,7 @@ checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", "once_cell", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -67,18 +82,58 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "backtrace" +version = "0.3.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets", +] + [[package]] name = "bitflags" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" +[[package]] +name = "boil" +version = "0.0.1" +dependencies = [ + "clap", + "clap_complete", + "git2", + "glob", + "semver", + "serde", + "serde_json", + "snafu", + "strum", + "time", + "tokio", + "toml", + "url", +] + [[package]] name = "bumpalo" version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + [[package]] name = "cc" version = "1.2.11" @@ -98,9 +153,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" -version = "4.5.27" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9" dependencies = [ "clap_builder", "clap_derive", @@ -108,9 +163,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.27" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" +checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d" dependencies = [ "anstream", "anstyle", @@ -118,11 +173,20 @@ dependencies = [ "strsim", ] +[[package]] +name = "clap_complete" +version = "4.5.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5abde44486daf70c5be8b8f8f1b66c49f86236edf6fa2abadb4d961c4c6229a" +dependencies = [ + "clap", +] + [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491" dependencies = [ "heck", "proc-macro2", @@ -152,14 +216,14 @@ dependencies = [ "libc", "once_cell", "unicode-width 0.2.0", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] name = "deranged" -version = "0.3.11" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", ] @@ -194,7 +258,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -220,10 +284,16 @@ checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" dependencies = [ "cfg-if", "libc", - "wasi", + "wasi 0.13.3+wasi-0.2.2", "windows-targets", ] +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + [[package]] name = "git2" version = "0.20.1" @@ -239,6 +309,12 @@ dependencies = [ "url", ] +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + [[package]] name = "hashbrown" version = "0.15.2" @@ -414,6 +490,17 @@ dependencies = [ "web-time", ] +[[package]] +name = "io-uring" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" +dependencies = [ + "bitflags", + "cfg-if", + "libc", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -530,6 +617,26 @@ version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -552,6 +659,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.20.2" @@ -690,6 +806,12 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "rustc-demangle" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" + [[package]] name = "rustix" version = "0.38.44" @@ -700,7 +822,28 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustversion" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "semver" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +dependencies = [ + "serde", ] [[package]] @@ -723,11 +866,23 @@ dependencies = [ "syn", ] +[[package]] 
+name = "serde_json" +version = "1.0.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + [[package]] name = "serde_spanned" -version = "0.6.8" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" dependencies = [ "serde", ] @@ -747,6 +902,21 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook-registry" +version = "1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" + [[package]] name = "smallvec" version = "1.13.2" @@ -786,6 +956,28 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "strum" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + [[package]] name = "syn" version = "2.0.96" @@ 
-819,7 +1011,7 @@ dependencies = [ "getrandom", "once_cell", "rustix", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -834,11 +1026,12 @@ dependencies = [ [[package]] name = "time" -version = "0.3.37" +version = "0.3.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" dependencies = [ "deranged", + "itoa", "num-conv", "powerfmt", "serde", @@ -848,15 +1041,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "time-macros" -version = "0.2.19" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" dependencies = [ "num-conv", "time-core", @@ -872,40 +1065,74 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tokio" +version = "1.46.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "pin-project-lite", + "signal-hook-registry", + "slab", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "toml" -version = "0.8.19" +version = "0.9.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac" dependencies = [ + "indexmap", "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_parser", + "toml_writer", + "winnow", ] [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" dependencies = [ "serde", ] [[package]] -name = "toml_edit" -version = "0.22.23" +name = "toml_parser" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" +checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30" dependencies = [ - "indexmap", - "serde", - "serde_spanned", - "toml_datetime", "winnow", ] +[[package]] +name = "toml_writer" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b679217f2848de74cabd3e8fc5e6d66f40b7da40f8e1954d92054d9010690fd5" + [[package]] name = "tracing" version = "0.1.41" @@ -1071,6 +1298,12 @@ dependencies = [ "quote", ] +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + [[package]] name = "wasi" version = "0.13.3+wasi-0.2.2" @@ -1169,6 +1402,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-sys" version = "0.59.0" @@ -1244,12 +1486,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.0" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e49d2d35d3fad69b39b94139037ecfb4f359f08958b9c11e7315ce770462419" -dependencies = [ - "memchr", -] +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" [[package]] name = "wit-bindgen-rt" diff --git a/Cargo.toml b/Cargo.toml index 805dab66f..024ec4068 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,14 +2,24 @@ members = ["rust/*"] resolver = "2" +[workspace.package] +authors = ["Stackable "] + [workspace.dependencies] -clap = { version = "4.5.27", features = ["derive"] } +clap = { version = "4.5.41", features = ["derive"] } +clap_complete = "4.5.55" git2 = "0.20.1" +glob = "0.3.2" +semver = { version = "1.0.26", features = ["serde"] } serde = { version = "1.0.217", features = ["derive"] } +serde_json = "1.0.140" snafu = "0.8.5" +strum = { version = "0.27.1", features = ["derive"] } tempfile = "3.16.0" -time = { version = "0.3.37", features = ["parsing"] } -toml = "0.8.19" +time = { version = "0.3.41", features = ["parsing", "formatting"] } +tokio = { version = "1.46.1", features = ["rt", "macros", "process"] } +toml = "0.9.2" tracing = "0.1.41" tracing-indicatif = "0.3.9" tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +url = "2.5.4" diff --git a/boil.toml b/boil.toml new file mode 100644 index 000000000..67cfac8be --- /dev/null +++ b/boil.toml @@ -0,0 +1,5 @@ +[build-arguments] +STACKABLE_USER_NAME = "stackable" +STACKABLE_USER_UID = "1000" +STACKABLE_USER_GID = "1000" +DELETE_CACHES = "true" diff --git a/rust/boil/Cargo.toml b/rust/boil/Cargo.toml new file mode 100644 index 000000000..cb68a0201 --- /dev/null +++ 
b/rust/boil/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "boil" +version = "0.0.1" +edition = "2024" + +[dependencies] +clap.workspace = true +clap_complete.workspace = true +git2.workspace = true +glob.workspace = true +semver.workspace = true +serde.workspace = true +serde_json.workspace = true +snafu.workspace = true +strum.workspace = true +time.workspace = true +tokio.workspace = true +toml.workspace = true +url.workspace = true diff --git a/rust/boil/README.md b/rust/boil/README.md new file mode 100644 index 000000000..e29b38986 --- /dev/null +++ b/rust/boil/README.md @@ -0,0 +1,31 @@ +# boil + +boil builds container images in parallel. + +- Define versions of container images and version specific values via the `boil-config.toml` file. +- Refer to local images in Containerfiles via `FROM local-image/...`. Nesting is supported. +- Structured output is provided for any potential follow-up tasks. + +## Quick Overview + +```shell +# Builds all version of the image located in the 'airflow' folder +boil build airflow + +# Builds the 3.0.1 version of the image located in the 'airflow' folder +boil build airflow=3.0.1 + +# Builds both the 3.0.1 and 2.10.5 versions of the image located in the +# 'airflow' folder +boil build airflow=3.0.1,2.10.5 + +# Builds all versions of the images located in the 'airflow' and 'opa' folder +boil build airflow opa + +# Display a list of all images and their declared versions as structured JSON +# output +boil show images + +# Soon (hopefully) implemented +boil show graph +``` diff --git a/rust/boil/src/build/bakefile.rs b/rust/boil/src/build/bakefile.rs new file mode 100644 index 000000000..e56896888 --- /dev/null +++ b/rust/boil/src/build/bakefile.rs @@ -0,0 +1,478 @@ +use std::{ + collections::BTreeMap, + fmt::Debug, + ops::{Deref, DerefMut}, + path::PathBuf, +}; + +use glob::glob; +use serde::Serialize; +use snafu::{ResultExt, Snafu}; +use time::format_description::well_known::Rfc3339; +use url::Host; + +use crate::{ + 
VersionExt, + build::{ + cli, + docker::{BuildArgument, BuildArguments}, + image::{Image, ImageConfig, ImageConfigError, ImageOptions, VersionOptionsPair}, + platform::TargetPlatform, + }, + config::Config, + utils::{format_image_manifest_uri, format_image_repository_uri}, +}; + +pub const OPEN_CONTAINER_IMAGE_REVISION: &str = "org.opencontainers.image.revision"; +pub const OPEN_CONTAINER_IMAGE_CREATED: &str = "org.opencontainers.image.created"; + +pub const ENTRY_TARGET_NAME_PREFIX: &str = "entry--"; + +#[derive(Debug, Snafu)] +pub enum GitError { + #[snafu(display("failed to open git repository"))] + OpenRepository { source: git2::Error }, + + #[snafu(display("failed to parse HEAD revision"))] + ParseHeadRevision { source: git2::Error }, +} + +#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("failed to format current datetime"))] + FormatTime { source: time::error::Format }, + + #[snafu(display("failed to get revision"))] + GetRevision { source: GitError }, + + #[snafu(display("failed to create target graph"))] + CreateGraph { source: TargetsError }, +} + +#[derive(Debug, Snafu)] +pub enum TargetsError { + #[snafu(display("encountered invalid product version"))] + InvalidProductVersion { source: ImageConfigError }, + + #[snafu(display("failed to read image config"))] + ReadImageConfig { source: ImageConfigError }, +} + +#[derive(Debug, Default)] +pub struct TargetsOptions { + pub only_entry: bool, +} + +/// Contains targets selected by the user. +/// +/// This is a map which uses the image/target name as the key. Each key points to another map, +/// which contains one entry per version of the target. Each value contains the image options and +/// a boolean flag to indicate if this target is an entry target. 
+#[derive(Debug, Default)] +pub struct Targets(BTreeMap>); + +impl Deref for Targets { + type Target = BTreeMap>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Targets { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl IntoIterator for Targets { + type Item = (String, BTreeMap); + type IntoIter = + std::collections::btree_map::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl Targets { + pub fn all(options: TargetsOptions) -> Result { + let image_config_paths = glob("./**/boil-config.toml") + .expect("glob pattern must be valid") + .filter_map(Result::ok); + + let mut targets = Self::default(); + + for image_config_path in image_config_paths { + let image_config = + ImageConfig::from_file(&image_config_path).context(ReadImageConfigSnafu)?; + + let image_name = image_config_path + .parent() + .expect("there must be a parent") + .to_string_lossy() + .into_owned(); + + let pairs = image_config.all(); + + targets.insert_targets(image_name.to_owned(), pairs, &options, true)?; + } + + println!("{targets:#?}"); + + Ok(targets) + } + + pub fn from_images(images: &[Image], options: TargetsOptions) -> Result { + let mut targets = Self::default(); + + for image in images { + // TODO (@Techassi): We should instead build the graph based on the Dockerfile(s), + // because this is the source of truth and what ultimately gets built. The boil config + // files are not a source a truth, but just provide data needed during the build. + let image_config_path = PathBuf::new().join(&image.name).join("boil-config.toml"); + + // Read the product config which defines supported product versions and their dependencies as + // well as other values. + let image_config = + ImageConfig::from_file(image_config_path).context(ReadImageConfigSnafu)?; + + // Create a list of product versions we need to generate targets for in the bakefile. 
+ let pairs = image_config + .filter_by_version(&image.versions) + .context(InvalidProductVersionSnafu)?; + + targets.insert_targets(image.name.clone(), pairs, &options, true)?; + } + + Ok(targets) + } + + fn insert_targets( + &mut self, + image_name: String, + pairs: Vec, + options: &TargetsOptions, + is_entry: bool, + ) -> Result<(), TargetsError> { + for VersionOptionsPair { + version: image_version, + options: image_options, + } in pairs + { + if !options.only_entry { + // TODO (@Techassi): Add cycle detection + for (image_name, image_version) in &image_options.local_images { + if self + .get(image_name) + .is_some_and(|image_versions| image_versions.contains_key(image_version)) + { + continue; + } + + let product_config_path = + PathBuf::new().join(image_name).join("boil-config.toml"); + + let product_config = ImageConfig::from_file(product_config_path) + .context(ReadImageConfigSnafu)?; + + let pairs = product_config + .filter_by_version(&[image_version]) + .context(InvalidProductVersionSnafu)?; + + // Wowzers, recursion! + self.insert_targets(image_name.clone(), pairs, options, false)?; + } + } + + self.entry(image_name.clone()) + .or_default() + .insert(image_version, (image_options, is_entry)); + } + + Ok(()) + } +} + +#[derive(Debug, Default, Serialize)] +pub struct Bakefile { + #[serde(rename = "group")] + pub groups: BTreeMap, + + #[serde(rename = "target")] + pub targets: BTreeMap, +} + +impl Bakefile { + /// Create a bakefile from the [`BuildArguments`](cli::BuildArguments) provided via the CLI. + /// + /// This will only create targets for selected entry images and their dependencies. There is no + /// need to filter anything out afterwards. The filtering is done automatically internally. 
+ pub fn from_args(args: &cli::BuildArguments, config: Config) -> Result { + let graph = Targets::from_images(&args.images, TargetsOptions::default()) + .context(CreateGraphSnafu)?; + Self::from_targets(graph, args, config) + } + + /// Returns all image manifest URIs for entry images. + pub fn image_manifest_uris(&self) -> Vec<&str> { + self.targets + .iter() + // We only care about the entry targets, because those are the primary images boil + // builds. + .filter(|(target_name, _)| target_name.starts_with(ENTRY_TARGET_NAME_PREFIX)) + // The image manifest URIs file only contains the image tags + .flat_map(|(_, target)| &target.tags) + // Flatten multiple tags (boil currently only ever writes a single one, but the data + // structure can accept a list). + .map(|s| s.as_str()) + .collect() + } + + fn from_targets( + targets: Targets, + args: &cli::BuildArguments, + config: Config, + ) -> Result { + let mut bakefile_targets = BTreeMap::new(); + let mut groups: BTreeMap = BTreeMap::new(); + + // TODO (@Techassi): Can we somehow optimize this to come by with minimal amount of + // cloning, because we also need to clone on every loop iteration below. 
+ let mut docker_build_arguments = config.build_arguments; + docker_build_arguments.extend(args.docker_build_arguments.clone()); + docker_build_arguments.insert(BuildArgument::new( + "RELEASE_VERSION".to_owned(), + args.image_version.base_prerelease(), + )); + + for (image_name, image_versions) in targets.0.into_iter() { + for (image_version, (image_options, is_entry)) in image_versions { + // TODO (@Techassi): Clean this up + // TODO (@Techassi): Move the arg formatting into functions + let mut docker_build_arguments = docker_build_arguments.clone(); + + let local_version_docker_args: Vec<_> = image_options + .local_images + .iter() + .map(|(image_name, image_version)| { + BuildArgument::new( + format!( + "{image_name}_VERSION", + image_name = image_name.to_uppercase().replace('-', "_") + ), + image_version.to_string(), + ) + }) + .collect(); + + docker_build_arguments.extend(image_options.build_arguments.clone()); + docker_build_arguments.extend(local_version_docker_args); + docker_build_arguments.insert(BuildArgument::new( + "PRODUCT_VERSION".to_owned(), + image_version.to_string(), + )); + + // The image registry, eg. 
`oci.stackable.tech` or `localhost` + let image_registry = if args.use_localhost_registry { + &Host::Domain(String::from("localhost")) + } else { + &args.registry + }; + + let image_repository_uri = format_image_repository_uri( + image_registry, + &args.registry_namespace, + &image_name, + ); + + let image_manifest_uri = format_image_manifest_uri( + &image_repository_uri, + &image_version, + &args.image_version, + args.target_platform.architecture(), + ); + + let dockerfile = PathBuf::new().join(&image_name).join("Dockerfile"); + let revision = Self::git_head_revision().context(GetRevisionSnafu)?; + let date_time = Self::now()?; + + let target_name = if is_entry { + Self::format_entry_target_name(&image_name, &image_version) + } else { + Self::format_target_name(&image_name, &image_version) + }; + + let contexts: BTreeMap<_, _> = image_options + .local_images + .iter() + .map(|(image_name, image_version)| { + let context_name = Self::format_context_name(image_name); + let context_target = Self::format_context_target(image_name, image_version); + + (context_name, context_target) + }) + .collect(); + + let target = BakefileTarget { + annotations: BakefileTarget::annotations(&date_time, &revision), + labels: BakefileTarget::labels(date_time, revision), + tags: vec![image_manifest_uri], + arguments: docker_build_arguments, + platforms: vec![args.target_platform.clone()], + context: PathBuf::from("."), + dockerfile, + contexts, + }; + + bakefile_targets.insert(target_name, target); + + // Add the target to the default group if it is an entry + if is_entry { + groups + .entry("default".to_owned()) + .or_default() + .targets + .push(Self::format_entry_target_name(&image_name, &image_version)); + } + } + } + + Ok(Self { + targets: bakefile_targets, + groups, + }) + } + + /// Formats and returns the entry target name, eg. `entry--opa-1_4_2`. 
+ fn format_entry_target_name(image_name: &str, image_version: &str) -> String { + let target_name = Self::format_target_name(image_name, image_version); + format!("{ENTRY_TARGET_NAME_PREFIX}{target_name}") + } + + /// Formats and returns the target name, eg. `stackable-base-1_0_0`. + fn format_target_name(image_name: &str, image_version: &str) -> String { + // Replace any slashes from nested product names, eg. shared/protobuf, because docker buildx + // has this weird restriction (because it also supports push, which we do on our own). We + // are therefore artificially limited what target names we can use: [a-zA-Z0-9_-]+ + let product_name = image_name.replace('/', "__"); + + // The dots in the semantic version also need to be replaced. + let product_version = image_version.to_string().replace('.', "_"); + + format!("{product_name}-{product_version}") + } + + /// Formats and return the context name, eg. `stackable/image/stackable-base-1_0_0`. + fn format_context_name(name: &str) -> String { + format!("local-image/{name}") + // format!("stackable/image/{name}") + } + + /// Formats and returns the context target name, eg. `target:stackable-base-1_0_0`. + fn format_context_target(name: &str, version: &str) -> String { + let target_name = Self::format_target_name(name, version); + format!("target:{target_name}") + } + + fn now() -> Result { + time::UtcDateTime::now() + .format(&Rfc3339) + .context(FormatTimeSnafu) + } + + fn git_head_revision() -> Result { + let repo = git2::Repository::open(".").context(OpenRepositorySnafu)?; + let rev = repo.revparse("HEAD").context(ParseHeadRevisionSnafu)?; + + Ok(rev.from().unwrap().id().to_string()) + } +} + +// TODO (@Techassi): Figure out of we can use borrowed data in here. This would avoid a whole bunch +// of cloning. 
+#[derive(Debug, Serialize)] +pub struct BakefileTarget { + pub annotations: Vec, + pub context: PathBuf, + + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub contexts: BTreeMap, + pub dockerfile: PathBuf, + + #[serde(rename = "args", skip_serializing_if = "BuildArguments::is_empty")] + pub arguments: BuildArguments, + + pub labels: BTreeMap, + pub tags: Vec, + pub platforms: Vec, +} + +impl BakefileTarget { + fn annotations(date_time: &str, revision: &str) -> Vec { + vec![ + format!("{OPEN_CONTAINER_IMAGE_CREATED}={date_time}"), + format!("{OPEN_CONTAINER_IMAGE_REVISION}={revision}"), + ] + } + + fn labels(date_time: String, revision: String) -> BTreeMap { + BTreeMap::from([ + (OPEN_CONTAINER_IMAGE_CREATED.to_owned(), date_time.clone()), + (OPEN_CONTAINER_IMAGE_REVISION.to_owned(), revision), + ("build-date".to_owned(), date_time), + ]) + } +} + +#[derive(Debug, Default, Serialize)] +pub struct BakefileGroup { + targets: Vec, +} + +// #[derive(Debug, Default)] +// pub struct Graph { +// targets: BTreeMap>, +// } + +// impl Graph { +// pub fn all() -> Self { +// let image_config_paths: Vec<_> = glob("./**/boil-config.toml") +// .expect("glob pattern must be valid") +// .filter_map(Result::ok) +// .collect(); + +// let mut targets = Self::default(); + +// for image_config_path in image_config_paths { +// let image_config = ImageConfig::from_file(&image_config_path).unwrap(); + +// let (image_name, _) = image_config_path +// .to_str() +// .unwrap() +// .rsplit_once('/') +// .unwrap(); + +// let pairs = image_config.filter_by_version(None).unwrap(); + +// targets.insert_targets(image_name.to_owned(), pairs); +// } + +// targets +// } + +// fn insert_targets( +// &mut self, +// image_name: String, +// pairs: Vec, +// ) -> Vec { +// let mut nodes = Vec::new(); + +// for VersionOptionsPair { version, options } in pairs { +// let key = format!("{image_name}:{version}"); +// let child_nodes = Vec::new(); + +// // let nodes = self.insert_targets(image_name, 
pairs);
+// }
+// }
+// }
diff --git a/rust/boil/src/build/cli.rs b/rust/boil/src/build/cli.rs
new file mode 100644
index 000000000..871923941
--- /dev/null
+++ b/rust/boil/src/build/cli.rs
@@ -0,0 +1,122 @@
+use std::path::PathBuf;
+
+use clap::{Args, ValueHint, value_parser};
+use semver::Version;
+use url::Host;
+
+use crate::{
+    build::{
+        docker::BuildArgument,
+        image::Image,
+        platform::{Architecture, TargetPlatform},
+    },
+    cli::parse_image_version,
+};
+
+#[derive(Debug, Args)]
+pub struct BuildArguments {
+    /// The image(s) which should be built. The format is name[=version,...].
+    #[arg(help_heading = "Image Options", required = true)]
+    pub images: Vec,
+
+    // The action currently does the wrong thing here. It includes the
+    // architecture even though it should come from the --target-platform arg.
+    // The release arg is NOT needed, because this version IS the release version.
+    /// The image version being built.
+    #[arg(
+        short, long,
+        value_parser = parse_image_version,
+        default_value_t = Self::default_image_version(),
+        help_heading = "Image Options"
+    )]
+    pub image_version: Version,
+
+    /// Target platform of the image.
+    #[arg(
+        short, long,
+        short_alias = 'a', alias = "architecture",
+        default_value_t = Self::default_architecture(),
+        help_heading = "Image Options"
+    )]
+    pub target_platform: TargetPlatform,
+
+    /// Image registry used in image manifests, URIs, and tags.
+    #[arg(
+        short, long,
+        default_value_t = Self::default_registry(),
+        value_parser = Host::parse,
+        value_hint = ValueHint::Hostname,
+        help_heading = "Registry Options"
+    )]
+    pub registry: Host,
+
+    /// The namespace within the given registry.
+    #[arg(
+        short = 'n',
+        long = "registry-namespace",
+        alias = "organization",
+        default_value = "sdp",
+        help_heading = "Registry Options"
+    )]
+    pub registry_namespace: String,
+
+    /// Use 'localhost' as the registry instead of the configured registry to avoid any accidental interactions
+    /// with remote registries.
+ /// + /// This is especially useful in CI, which can re-tag the image before pushing it. + #[arg(long, help_heading = "Registry Options")] + pub use_localhost_registry: bool, + + /// Override the target containerfile used, points to /. + #[arg( + long, + default_value_os_t = Self::default_target_containerfile(), + value_hint = ValueHint::FilePath, + help_heading = "Build Options" + )] + pub target_containerfile: PathBuf, + + /// Override build arguments, in key=value format. The key is case insensitive. This argument + /// can be supplied multiple times. + #[arg( + long = "build-argument", + alias = "build-arg", + help_heading = "Build Options" + )] + pub docker_build_arguments: Vec, + + /// Write target image tags to . Useful for signing or other follow-up CI steps. + #[arg( + long, + alias = "export-tags-file", + help_heading = "Build Options", + value_name = "EXPORT_FILE", + value_hint = ValueHint::FilePath, + value_parser = value_parser!(PathBuf), + default_missing_value = "boil-target-tags", + num_args(0..=1) + )] + pub export_image_manifest_uris: Option, + + /// Dry run. This does not build the image(s) but instead prints out the bakefile. 
+ #[arg(short, long, alias = "dry")] + pub dry_run: bool, +} + +impl BuildArguments { + fn default_image_version() -> Version { + "0.0.0-dev".parse().expect("must be a valid SemVer") + } + + fn default_architecture() -> TargetPlatform { + TargetPlatform::Linux(Architecture::Amd64) + } + + fn default_registry() -> Host { + Host::Domain(String::from("oci.stackable.tech")) + } + + fn default_target_containerfile() -> PathBuf { + PathBuf::from("Dockerfile") + } +} diff --git a/rust/boil/src/build/docker.rs b/rust/boil/src/build/docker.rs new file mode 100644 index 000000000..38f10e3cb --- /dev/null +++ b/rust/boil/src/build/docker.rs @@ -0,0 +1,156 @@ +use std::{ + collections::BTreeSet, + fmt::Display, + ops::{Deref, DerefMut}, + str::FromStr, +}; + +use serde::{Deserialize, Serialize, de::Visitor, ser::SerializeMap}; +use snafu::{Snafu, ensure}; + +#[derive(Debug, Snafu)] +pub enum ParseBuildArgumentError { + NonAscii, +} + +// TODO (@Techassi): Unify parsing/casing in one place +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct BuildArgument((String, String)); + +impl BuildArgument { + pub fn new(key: String, value: String) -> Self { + Self((key.replace(['-', '/'], "_").to_uppercase(), value)) + } +} + +impl FromStr for BuildArgument { + type Err = ParseBuildArgumentError; + + fn from_str(s: &str) -> Result { + ensure!(s.is_ascii(), NonAsciiSnafu); + + let (key, value) = s.split_once('=').unwrap(); + let key = key.replace(['-', '/'], "_").to_uppercase(); + + Ok(Self((key, value.to_owned()))) + } +} + +impl<'de> Deserialize<'de> for BuildArgument { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct BuildArgumentVisitor; + + impl Visitor<'_> for BuildArgumentVisitor { + type Value = BuildArgument; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(formatter, "a valid build argument") + } + + fn visit_str(self, v: &str) -> Result + where + E: serde::de::Error, + { + 
BuildArgument::from_str(v).map_err(serde::de::Error::custom) + } + } + + deserializer.deserialize_str(BuildArgumentVisitor) + } +} + +impl Display for BuildArgument { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let (key, value) = &self.0; + write!(f, "{key}={value}") + } +} + +#[derive(Clone, Debug, Default)] +pub struct BuildArguments(BTreeSet); + +impl Deref for BuildArguments { + type Target = BTreeSet; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for BuildArguments { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Extend for BuildArguments { + fn extend>(&mut self, iter: T) { + self.0.extend(iter); + } +} + +impl IntoIterator for BuildArguments { + type Item = BuildArgument; + + type IntoIter = std::collections::btree_set::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'de> Deserialize<'de> for BuildArguments { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserializer.deserialize_map(BuildArgumentsVisitor) + } +} + +struct BuildArgumentsVisitor; + +impl<'de> Visitor<'de> for BuildArgumentsVisitor { + type Value = BuildArguments; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(formatter, "a map of valid build arguments") + } + + fn visit_map(self, mut map: A) -> Result + where + A: serde::de::MapAccess<'de>, + { + let mut args = BTreeSet::new(); + + while let Some((key, value)) = map.next_entry()? 
{ + args.insert(BuildArgument::new(key, value)); + } + + Ok(BuildArguments(args)) + } +} + +impl Serialize for BuildArguments { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut map = serializer.serialize_map(Some(self.len()))?; + + for BuildArgument((key, value)) in &self.0 { + map.serialize_entry(&key, &value)?; + } + + map.end() + } +} + +impl BuildArguments { + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} diff --git a/rust/boil/src/build/image.rs b/rust/boil/src/build/image.rs new file mode 100644 index 000000000..4f84e127b --- /dev/null +++ b/rust/boil/src/build/image.rs @@ -0,0 +1,163 @@ +use std::{ + collections::BTreeMap, + fmt::Display, + ops::Deref, + path::{Path, PathBuf}, + str::FromStr, +}; + +use serde::Deserialize; +use snafu::{ResultExt as _, Snafu}; + +use crate::{IfContext, build::docker::BuildArguments}; + +#[derive(Debug, Snafu)] +pub enum ParseImageError { + #[snafu(display("encountered invalid format, expected name[=version,...]"))] + InvalidFormat, +} + +#[derive(Clone, Debug)] +pub struct Image { + pub name: String, + pub versions: Vec, +} + +impl FromStr for Image { + type Err = ParseImageError; + + fn from_str(s: &str) -> Result { + let parts: Vec<_> = s.split('=').collect(); + + match parts.len() { + 1 => Ok(Self::new_unversioned(parts[0].to_owned())), + 2 => { + let versions: Vec<_> = parts[1].split(',').map(ToOwned::to_owned).collect(); + Ok(Self::new(parts[0].to_owned(), versions)) + } + _ => InvalidFormatSnafu.fail(), + } + } +} + +impl Display for Image { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if self.versions.is_empty() { + f.write_str(&self.name) + } else { + write!( + f, + "{name}={versions}", + name = self.name, + versions = self.versions.join(",") + ) + } + } +} + +impl Image { + fn new(name: String, versions: Vec) -> Self { + Self { name, versions } + } + + fn new_unversioned(name: String) -> Self { + Self { + name, + versions: vec![], + 
} + } +} + +#[derive(Debug, Snafu)] +pub enum ImageConfigError { + #[snafu(display("failed to read config file at {path}", path = path.display()))] + ReadFile { + source: std::io::Error, + path: PathBuf, + }, + + #[snafu(display("failed to deserialize config file from TOML"))] + Deserialize { source: toml::de::Error }, + + #[snafu(display("provided filter version yielded empty list"))] + EmptyFilter, +} + +#[derive(Debug, Deserialize)] +pub struct ImageConfig { + pub versions: ImageVersions, +} + +impl ImageConfig { + pub fn filter_by_version( + self, + versions: &[V], + ) -> Result, ImageConfigError> + where + V: AsRef + PartialEq, + { + let versions: Vec<_> = self + .pairs() + .filter(|(image_version, _)| { + versions.is_empty() || versions.iter().any(|v| v.as_ref() == image_version) + }) + .map(Into::into) + .collect(); + + versions.if_context(|v| !v.is_empty(), EmptyFilterSnafu) + } + + pub fn all(self) -> Vec { + self.pairs().map(Into::into).collect() + } + + fn pairs(self) -> impl Iterator { + self.versions.0.into_iter() + } +} + +impl ImageConfig { + pub fn from_file(path: impl AsRef) -> Result { + let path = path.as_ref(); + let contents = std::fs::read_to_string(path).with_context(|_| ReadFileSnafu { path })?; + toml::from_str(&contents).context(DeserializeSnafu) + } +} + +#[derive(Debug, Deserialize)] +pub struct ImageVersions(BTreeMap); + +impl Deref for ImageVersions { + type Target = BTreeMap; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct ImageOptions { + #[serde(default)] + pub local_images: BTreeMap, + + // NOTE (@Techassi): Potentially add a dependencies field here which will be automatically be + // suffixed with _VERSION. 
+ #[serde(default)] + pub build_arguments: BuildArguments, +} + +#[derive(Debug)] +pub struct VersionOptionsPair { + pub version: String, + pub options: ImageOptions, +} + +impl From<(String, ImageOptions)> for VersionOptionsPair { + fn from(value: (String, ImageOptions)) -> Self { + VersionOptionsPair { + version: value.0, + options: value.1, + } + } +} diff --git a/rust/boil/src/build/mod.rs b/rust/boil/src/build/mod.rs new file mode 100644 index 000000000..aa3753f4b --- /dev/null +++ b/rust/boil/src/build/mod.rs @@ -0,0 +1,105 @@ +use std::{ + fmt::Debug, + process::{Command, Stdio}, +}; + +use snafu::{OptionExt, ResultExt, Snafu, ensure}; + +use crate::{ + build::{bakefile::Bakefile, cli::BuildArguments}, + config::Config, +}; + +pub mod bakefile; +pub mod cli; +pub mod docker; +pub mod image; +pub mod platform; + +#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("failed to create bakefile"))] + CreateBakefile { source: bakefile::Error }, + + #[snafu(display("failed to write image manifest URIs to file"))] + WriteImageManifestUrisFile { source: std::io::Error }, + + #[snafu(display("failed to serialize bakefile as JSON"))] + SerializeBakefile { source: serde_json::Error }, + + #[snafu(display("failed to acquire stdin handle"))] + AcquireStdinHandle, + + #[snafu(display("failed to run child process"))] + RunChildProcess { source: std::io::Error }, + + #[snafu(display("encountered invalid image version, must not include any build metadata"))] + InvalidImageVersion, +} + +pub fn run_command(args: BuildArguments, config: Config) -> Result<(), Error> { + // TODO (@Techassi): Parse Dockerfile instead to build the target graph + // let pattern = format!("**/{}/boil-config.toml", arguments.product.name); + + // Validation + ensure!( + args.image_version.build.is_empty(), + InvalidImageVersionSnafu + ); + + // Create bakefile + let bakefile = Bakefile::from_args(&args, config).context(CreateBakefileSnafu)?; + let image_manifest_uris = 
bakefile.image_manifest_uris(); + let count = image_manifest_uris.len(); + + // Write the image manifest URIs to file if requested + if let Some(path) = args.export_image_manifest_uris { + std::fs::write(path, image_manifest_uris.join("\n")) + .context(WriteImageManifestUrisFileSnafu)?; + } + + // Output the bakefile contents if in dry-run mode + if args.dry_run { + return serde_json::to_writer_pretty(std::io::stdout(), &bakefile) + .context(SerializeBakefileSnafu); + } + + // TODO (@Techassi): Invoke this directly using the Docker daemon via bollard + // Finally invoke the docker buildx bake command + let mut child = Command::new("docker") + .arg("buildx") + .arg("bake") + // .arg("--no-cache") + .arg("--file") + .arg("-") + .stdin(Stdio::piped()) + .spawn() + .unwrap(); + + let stdin_handle = child.stdin.take().with_context(|| { + child + .kill() + .expect("killing the child process must succeed"); + AcquireStdinHandleSnafu + })?; + + serde_json::to_writer(stdin_handle, &bakefile).with_context(|_| { + child + .kill() + .expect("killing the child process must succeed"); + SerializeBakefileSnafu + })?; + + let status = child.wait().context(RunChildProcessSnafu)?; + + // TODO (@Techassi): Return an error if the status was not a success + if status.success() { + println!( + "Successfully built {count} image{plural}:\n{images}", + plural = if count > 1 { "s" } else { "" }, + images = image_manifest_uris.join("\n") + ); + } + + Ok(()) +} diff --git a/rust/boil/src/build/platform.rs b/rust/boil/src/build/platform.rs new file mode 100644 index 000000000..857e4b532 --- /dev/null +++ b/rust/boil/src/build/platform.rs @@ -0,0 +1,67 @@ +use std::{fmt::Display, str::FromStr}; + +use serde::Serialize; +use snafu::{OptionExt as _, ResultExt, Snafu}; + +#[derive(Debug, Snafu)] +pub enum ParseArchitecturePairError { + #[snafu(display("encountered invalid format, expected platform/architecture"))] + InvalidFormat, + + #[snafu(display("failed to parse architecture"))] + 
ParseArchitecture { source: strum::ParseError }, + + #[snafu(display("unsupported, target platform"))] + UnsupportedPlatform, +} + +#[derive(Clone, Debug)] +pub enum TargetPlatform { + Linux(Architecture), +} + +impl Serialize for TargetPlatform { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl FromStr for TargetPlatform { + type Err = ParseArchitecturePairError; + + fn from_str(s: &str) -> Result { + let (platform, architecture) = s.split_once('/').context(InvalidFormatSnafu)?; + let architecture = Architecture::from_str(architecture).context(ParseArchitectureSnafu)?; + + match platform { + "linux" => Ok(Self::Linux(architecture)), + _ => UnsupportedPlatformSnafu.fail(), + } + } +} + +impl Display for TargetPlatform { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self { + TargetPlatform::Linux(architecture) => write!(f, "linux/{architecture}"), + } + } +} + +impl TargetPlatform { + pub fn architecture(&self) -> &Architecture { + match self { + TargetPlatform::Linux(architecture) => architecture, + } + } +} + +#[derive(Copy, Clone, Debug, strum::Display, strum::EnumString, strum::AsRefStr)] +#[strum(serialize_all = "lowercase")] +pub enum Architecture { + Amd64, + Arm64, +} diff --git a/rust/boil/src/cli.rs b/rust/boil/src/cli.rs new file mode 100644 index 000000000..5914fc3c2 --- /dev/null +++ b/rust/boil/src/cli.rs @@ -0,0 +1,89 @@ +use std::{path::PathBuf, str::FromStr}; + +use clap::{Args, Parser, Subcommand}; +use clap_complete::Shell; +use semver::Version; +use snafu::{ResultExt, Snafu, ensure}; + +use crate::build::cli::BuildArguments; + +#[derive(Debug, Parser)] +#[command(author, version, about)] +pub struct Cli { + /// Path to the configuration file. + #[arg(short = 'c', long = "configuration", default_value_os_t = Self::default_config_path())] + pub config_path: PathBuf, + + /// Path to the OpenShift configuration file. 
+ #[arg(long, default_value_os_t = Self::default_openshift_config_path())] + pub openshift_config_path: PathBuf, + + #[arg(short, long, default_value_os_t = Self::default_base_path())] + pub base_path: PathBuf, + + #[command(subcommand)] + pub command: Command, +} + +impl Cli { + fn default_config_path() -> PathBuf { + PathBuf::from("./boil.toml") + } + + fn default_openshift_config_path() -> PathBuf { + PathBuf::from("./openshift.toml") + } + + fn default_base_path() -> PathBuf { + PathBuf::from(".") + } +} + +#[derive(Debug, Subcommand)] +pub enum Command { + /// Build one or more product images. + /// + /// Requires docker with the buildx extension. + #[command(alias = "some-chicken")] + Build(BuildArguments), + + /// Display various structured outputs in JSON format. + Show(ShowArguments), + + /// Generate shell completions. + Completions(CompletionsArguments), +} + +#[derive(Debug, Args)] +pub struct ShowArguments { + #[command(subcommand)] + pub commands: ShowCommand, +} + +#[derive(Debug, Subcommand)] +pub enum ShowCommand { + Images, + Tree, +} + +#[derive(Debug, Args)] +pub struct CompletionsArguments { + /// Shell to generate completions for. 
+ pub shell: Shell, +} + +#[derive(Debug, Snafu)] +pub enum ParseImageVersionError { + #[snafu(display("failed to parse semantic version"))] + ParseVersion { source: semver::Error }, + + #[snafu(display("semantic version must not contain build metadata"))] + ContainsBuildMetadata, +} + +pub fn parse_image_version(input: &str) -> Result { + let version = Version::from_str(input).context(ParseVersionSnafu)?; + ensure!(version.build.is_empty(), ContainsBuildMetadataSnafu); + + Ok(version) +} diff --git a/rust/boil/src/completions/mod.rs b/rust/boil/src/completions/mod.rs new file mode 100644 index 000000000..14518365a --- /dev/null +++ b/rust/boil/src/completions/mod.rs @@ -0,0 +1,10 @@ +use clap::CommandFactory; + +use crate::cli::{Cli, CompletionsArguments}; + +pub fn run_command(arguments: CompletionsArguments) { + let mut cli = Cli::command(); + let bin_name = cli.get_bin_name().unwrap_or("boil").to_owned(); + + clap_complete::generate(arguments.shell, &mut cli, bin_name, &mut std::io::stdout()); +} diff --git a/rust/boil/src/config.rs b/rust/boil/src/config.rs new file mode 100644 index 000000000..64e16aee9 --- /dev/null +++ b/rust/boil/src/config.rs @@ -0,0 +1,26 @@ +use std::path::Path; + +use serde::Deserialize; +use snafu::{ResultExt, Snafu}; + +use crate::build::docker::BuildArguments; + +#[derive(Debug, Snafu)] +pub enum ConfigError { + ReadFile { source: std::io::Error }, + + Deserialize { source: toml::de::Error }, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub struct Config { + pub build_arguments: BuildArguments, +} + +impl Config { + pub fn from_file(path: impl AsRef) -> Result { + let contents = std::fs::read_to_string(path).context(ReadFileSnafu)?; + toml::from_str(&contents).context(DeserializeSnafu) + } +} diff --git a/rust/boil/src/main.rs b/rust/boil/src/main.rs new file mode 100644 index 000000000..fadb093d6 --- /dev/null +++ b/rust/boil/src/main.rs @@ -0,0 +1,94 @@ +use clap::Parser; +use semver::Version; +use 
snafu::{ResultExt, Snafu}; + +use crate::{ + cli::{Cli, Command, ShowCommand}, + config::Config, +}; + +// Common modules +mod cli; +mod config; +mod utils; + +// Command modules +mod build; +mod completions; +mod show; + +pub trait IfContext: Sized { + fn if_context(self, predicate: P, context: C) -> Result + where + P: Fn(&Self) -> bool, + C: snafu::IntoError, + E: std::error::Error + snafu::ErrorCompat; +} + +impl IfContext for T { + fn if_context(self, predicate: P, context: C) -> Result + where + P: Fn(&Self) -> bool, + C: snafu::IntoError, + E: std::error::Error + snafu::ErrorCompat, + { + match predicate(&self) { + true => Ok(self), + false => Err(context.into_error(snafu::NoneError)), + } + } +} + +pub trait VersionExt { + /// Returns the base of a [`Version`] as a string, eg. `1.2.3`. + fn base(&self) -> String; + + /// Returns the base and prerelease of a [`Version`] as a string, eg. `1.2.3-rc.1`. + fn base_prerelease(&self) -> String; +} + +impl VersionExt for Version { + fn base(&self) -> String { + format!("{}.{}.{}", self.major, self.minor, self.patch) + } + + fn base_prerelease(&self) -> String { + let mut base = self.base(); + base.push('-'); + base.push_str(&self.pre); + base + } +} + +#[derive(Debug, Snafu)] +enum Error { + #[snafu(display("failed to run build command"))] + Build { source: build::Error }, + + #[snafu(display("failed to run show command"))] + Show { source: show::images::Error }, + + #[snafu(display("failed to read config"))] + ReadConfig { source: config::ConfigError }, +} + +#[tokio::main(flavor = "current_thread")] +#[snafu::report] +async fn main() -> Result<(), Error> { + let cli = Cli::parse(); + + match cli.command { + Command::Build(arguments) => { + let config = Config::from_file(&cli.config_path).context(ReadConfigSnafu)?; + build::run_command(arguments, config).context(BuildSnafu) + } + Command::Show(arguments) => match arguments.commands { + ShowCommand::Images => show::images::run_command().context(ShowSnafu), + 
ShowCommand::Tree => todo!(), + }, + Command::Completions(arguments) => { + completions::run_command(arguments); + Ok(()) + } + } +} diff --git a/rust/boil/src/show/images.rs b/rust/boil/src/show/images.rs new file mode 100644 index 000000000..3baaa3929 --- /dev/null +++ b/rust/boil/src/show/images.rs @@ -0,0 +1,27 @@ +use std::collections::BTreeMap; + +use snafu::{ResultExt, Snafu}; + +use crate::build::bakefile::{Targets, TargetsOptions}; + +#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("failed to serialize list as JSON"))] + SerializeList { source: serde_json::Error }, +} + +pub fn run_command() -> Result<(), Error> { + let list: BTreeMap<_, _> = Targets::all(TargetsOptions { only_entry: true }) + .unwrap() + .into_iter() + .map(|(image_name, image_versions)| { + let versions: Vec<_> = image_versions + .into_iter() + .map(|(image_version, (_, _))| image_version) + .collect(); + (image_name, versions) + }) + .collect(); + + serde_json::to_writer_pretty(std::io::stdout(), &list).context(SerializeListSnafu) +} diff --git a/rust/boil/src/show/mod.rs b/rust/boil/src/show/mod.rs new file mode 100644 index 000000000..8f0da9f8e --- /dev/null +++ b/rust/boil/src/show/mod.rs @@ -0,0 +1 @@ +pub mod images; diff --git a/rust/boil/src/utils.rs b/rust/boil/src/utils.rs new file mode 100644 index 000000000..561a949d6 --- /dev/null +++ b/rust/boil/src/utils.rs @@ -0,0 +1,23 @@ +use semver::Version; +use url::Host; + +use crate::build::platform::Architecture; + +/// Formats and returns the image repository URI, eg. `oci.stackable.tech/sdp/opa`. +pub fn format_image_repository_uri( + image_registry: &Host, + registry_namespace: &str, + image_name: &str, +) -> String { + format!("{image_registry}/{registry_namespace}/{image_name}") +} + +/// Formats and returns the image manifest URI, eg. `oci.stackable.tech/sdp/opa:1.4.2-stackable25.7.0-amd64`. 
+pub fn format_image_manifest_uri( + image_repository_uri: &str, + image_version: &str, + sdp_image_version: &Version, + architecture: &Architecture, +) -> String { + format!("{image_repository_uri}:{image_version}-stackable{sdp_image_version}-{architecture}") +} From 4e96bba3cff4a4b0b2d72903e0ab37d45a941488 Mon Sep 17 00:00:00 2001 From: Techassi Date: Tue, 12 Aug 2025 15:46:57 +0200 Subject: [PATCH 2/9] chore: Add boil configs and adjust Dockerfiles --- airflow/Dockerfile | 77 +++++---- airflow/boil-config.toml | 55 +++++++ druid/Dockerfile | 56 +++---- druid/boil-config.toml | 26 +++ hadoop/Dockerfile | 74 ++++----- hadoop/boil-config.toml | 22 +++ hadoop/hadoop/Dockerfile | 24 +-- hadoop/hadoop/boil-config.toml | 11 ++ hbase/Dockerfile | 86 +++++----- hbase/boil-config.toml | 25 +++ hbase/hbase-opa-authorizer/Dockerfile | 14 +- hbase/hbase-opa-authorizer/boil-config.toml | 5 + hbase/hbase-operator-tools/Dockerfile | 26 +-- hbase/hbase-operator-tools/boil-config.toml | 19 +++ hbase/hbase/Dockerfile | 32 ++-- hbase/hbase/boil-config.toml | 17 ++ hbase/phoenix/Dockerfile | 37 ++--- hbase/phoenix/boil-config.toml | 19 +++ hive/Dockerfile | 86 +++++----- hive/boil-config.toml | 38 +++++ java-base/Dockerfile | 16 +- java-base/boil-config.toml | 20 +++ java-devel/Dockerfile | 8 +- java-devel/boil-config.toml | 20 +++ jdk-base/Dockerfile | 16 +- jdk-base/boil-config.toml | 20 +++ kafka-testing-tools/Dockerfile | 20 +-- kafka-testing-tools/boil-config.toml | 4 + kafka/Dockerfile | 78 ++++----- kafka/boil-config.toml | 39 +++++ kafka/kafka-opa-plugin/Dockerfile | 10 +- kafka/kafka-opa-plugin/boil-config.toml | 2 + kafka/kcat/Dockerfile | 12 +- kafka/kcat/boil-config.toml | 3 + krb5/Dockerfile | 6 +- krb5/boil-config.toml | 1 + nifi/Dockerfile | 90 +++++----- nifi/boil-config.toml | 24 +++ omid/Dockerfile | 42 ++--- omid/boil-config.toml | 13 ++ opa/Dockerfile | 32 ++-- opa/boil-config.toml | 15 ++ opensearch/Dockerfile | 86 +++++----- opensearch/boil-config.toml | 4 + 
opensearch/security-plugin/Dockerfile | 12 +- opensearch/security-plugin/boil-config.toml | 2 + shared/statsd-exporter/Dockerfile | 14 +- shared/statsd-exporter/boil-config.toml | 2 + spark-connect-client/Dockerfile | 16 +- spark-connect-client/boil-config.toml | 6 + spark-k8s/Dockerfile | 172 ++++++++++---------- spark-k8s/boil-config.toml | 37 +++++ stackable-base/Dockerfile | 6 +- stackable-base/boil-config.toml | 2 + stackable-devel/Dockerfile | 2 +- stackable-devel/boil-config.toml | 1 + superset/Dockerfile | 66 ++++---- superset/boil-config.toml | 36 ++++ testing-tools/Dockerfile | 10 +- testing-tools/boil-config.toml | 2 + tools/Dockerfile | 10 +- tools/boil-config.toml | 7 + trino-cli/Dockerfile | 14 +- trino-cli/boil-config.toml | 2 + trino/Dockerfile | 32 ++-- trino/boil-config.toml | 23 +++ trino/storage-connector/Dockerfile | 18 +- trino/storage-connector/boil-config.toml | 11 ++ trino/trino/Dockerfile | 18 +- trino/trino/boil-config.toml | 8 + vector/Dockerfile | 8 +- vector/boil-config.toml | 6 + zookeeper/Dockerfile | 44 ++--- zookeeper/boil-config.toml | 6 + 74 files changed, 1237 insertions(+), 686 deletions(-) create mode 100644 airflow/boil-config.toml create mode 100644 druid/boil-config.toml create mode 100644 hadoop/boil-config.toml create mode 100644 hadoop/hadoop/boil-config.toml create mode 100644 hbase/boil-config.toml create mode 100644 hbase/hbase-opa-authorizer/boil-config.toml create mode 100644 hbase/hbase-operator-tools/boil-config.toml create mode 100644 hbase/hbase/boil-config.toml create mode 100644 hbase/phoenix/boil-config.toml create mode 100644 hive/boil-config.toml create mode 100644 java-base/boil-config.toml create mode 100644 java-devel/boil-config.toml create mode 100644 jdk-base/boil-config.toml create mode 100644 kafka-testing-tools/boil-config.toml create mode 100644 kafka/boil-config.toml create mode 100644 kafka/kafka-opa-plugin/boil-config.toml create mode 100644 kafka/kcat/boil-config.toml create mode 100644 
krb5/boil-config.toml create mode 100644 nifi/boil-config.toml create mode 100644 omid/boil-config.toml create mode 100644 opa/boil-config.toml create mode 100644 opensearch/boil-config.toml create mode 100644 opensearch/security-plugin/boil-config.toml create mode 100644 shared/statsd-exporter/boil-config.toml create mode 100644 spark-connect-client/boil-config.toml create mode 100644 spark-k8s/boil-config.toml create mode 100644 stackable-base/boil-config.toml create mode 100644 stackable-devel/boil-config.toml create mode 100644 superset/boil-config.toml create mode 100644 testing-tools/boil-config.toml create mode 100644 tools/boil-config.toml create mode 100644 trino-cli/boil-config.toml create mode 100644 trino/boil-config.toml create mode 100644 trino/storage-connector/boil-config.toml create mode 100644 trino/trino/boil-config.toml create mode 100644 vector/boil-config.toml create mode 100644 zookeeper/boil-config.toml diff --git a/airflow/Dockerfile b/airflow/Dockerfile index 837c50066..164edb912 100644 --- a/airflow/Dockerfile +++ b/airflow/Dockerfile @@ -3,19 +3,19 @@ # - SecretsUsedInArgOrEnv : OPA_AUTH_MANAGER is a false positive and breaks the build. # check=error=true;skip=InvalidDefaultArgInFrom,SecretsUsedInArgOrEnv -ARG GIT_SYNC +ARG GIT_SYNC_VERSION # For updated versions check https://github.com/kubernetes/git-sync/releases # which should contain a image location (e.g. 
registry.k8s.io/git-sync/git-sync:v3.6.8) -FROM oci.stackable.tech/sdp/git-sync/git-sync:${GIT_SYNC} AS gitsync-image +FROM oci.stackable.tech/sdp/git-sync/git-sync:${GIT_SYNC_VERSION} AS gitsync-image -FROM stackable/image/shared/statsd-exporter AS statsd_exporter-builder +FROM local-image/shared/statsd-exporter AS statsd_exporter-builder -FROM stackable/image/vector AS opa-auth-manager-builder +FROM local-image/vector AS opa-auth-manager-builder ARG OPA_AUTH_MANAGER -ARG PYTHON -ARG UV +ARG PYTHON_VERSION +ARG UV_VERSION COPY airflow/opa-auth-manager/${OPA_AUTH_MANAGER} /tmp/opa-auth-manager @@ -23,10 +23,10 @@ WORKDIR /tmp/opa-auth-manager RUN < /stackable/app/airflow-${PRODUCT}.cdx.json +end)' /tmp/sbom.json > /stackable/app/airflow-${PRODUCT_VERSION}.cdx.json EOF COPY --from=statsd_exporter-builder /statsd_exporter/statsd_exporter /stackable/statsd_exporter -COPY --from=statsd_exporter-builder /statsd_exporter/statsd_exporter-${SHARED_STATSD_EXPORTER}.cdx.json /stackable/statsd_exporter-${SHARED_STATSD_EXPORTER}.cdx.json +COPY --from=statsd_exporter-builder /statsd_exporter/statsd_exporter-${SHARED_STATSD_EXPORTER_VERSION}.cdx.json /stackable/statsd_exporter-${SHARED_STATSD_EXPORTER_VERSION}.cdx.json COPY --from=gitsync-image --chown=${STACKABLE_USER_UID}:0 /git-sync /stackable/git-sync RUN < /tmp/DRUID_SOURCE_DIR +RUN /stackable/patchable --images-repo-root=src checkout druid ${PRODUCT_VERSION} > /tmp/DRUID_SOURCE_DIR -RUN --mount=type=cache,id=maven-${PRODUCT},uid=${STACKABLE_USER_UID},target=/stackable/.m2/repository \ - --mount=type=cache,id=npm-${PRODUCT},uid=${STACKABLE_USER_UID},target=/stackable/.npm \ - --mount=type=cache,id=cache-${PRODUCT},uid=${STACKABLE_USER_UID},target=/stackable/.cache \ +RUN --mount=type=cache,id=maven-${PRODUCT_VERSION},uid=${STACKABLE_USER_UID},target=/stackable/.m2/repository \ + --mount=type=cache,id=npm-${PRODUCT_VERSION},uid=${STACKABLE_USER_UID},target=/stackable/.npm \ + 
--mount=type=cache,id=cache-${PRODUCT_VERSION},uid=${STACKABLE_USER_UID},target=/stackable/.cache \ < /stackable/package_manifest.txt +rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE_VERSION}\n" | sort > /stackable/package_manifest.txt chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt chmod g=u /stackable/package_manifest.txt rm -rf /var/cache/yum -ln -sf /stackable/apache-druid-${PRODUCT}-stackable${RELEASE} /stackable/druid +ln -sf /stackable/apache-druid-${PRODUCT_VERSION}-stackable${RELEASE_VERSION} /stackable/druid chown -h ${STACKABLE_USER_UID}:0 stackable/druid # Force to overwrite the existing 'run-druid' @@ -159,7 +159,7 @@ chown -h ${STACKABLE_USER_UID}:0 /stackable/druid/bin/run-druid # fix missing permissions chmod -R g=u /stackable/bin -chmod g=u /stackable/apache-druid-${PRODUCT}-stackable${RELEASE} /stackable/druid-${PRODUCT}-stackable${RELEASE}-src.tar.gz +chmod g=u /stackable/apache-druid-${PRODUCT_VERSION}-stackable${RELEASE_VERSION} /stackable/druid-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz EOF # ---------------------------------------- diff --git a/druid/boil-config.toml b/druid/boil-config.toml new file mode 100644 index 000000000..8a30bc28e --- /dev/null +++ b/druid/boil-config.toml @@ -0,0 +1,26 @@ +[versions."30.0.1".local-images] +# https://druid.apache.org/docs/30.0.1/operations/java/ +java-base = "17" +java-devel = "17" +"hadoop/hadoop" = "3.3.6" + +[versions."30.0.1".build-arguments] +authorizer-version = "0.7.0" + +[versions."31.0.1".local-images] +# https://druid.apache.org/docs/31.0.1/operations/java/ +java-base = "17" +java-devel = "17" +"hadoop/hadoop" = "3.3.6" + +[versions."31.0.1".build-arguments] +authorizer-version = "0.7.0" + +[versions."33.0.0".local-images] +# https://druid.apache.org/docs/33.0.0/operations/java/ +java-base = "17" +java-devel = "17" +"hadoop/hadoop" = "3.3.6" + +[versions."33.0.0".build-arguments] +authorizer-version = "0.7.0" diff --git a/hadoop/Dockerfile b/hadoop/Dockerfile index 
5ca3cd6b1..5a8892fc3 100644 --- a/hadoop/Dockerfile +++ b/hadoop/Dockerfile @@ -1,19 +1,19 @@ # syntax=docker/dockerfile:1.16.0@sha256:e2dd261f92e4b763d789984f6eab84be66ab4f5f08052316d8eb8f173593acf7 # check=error=true -FROM stackable/image/hadoop/hadoop AS hadoop-builder +FROM local-image/hadoop/hadoop AS hadoop-builder -FROM stackable/image/java-devel AS hdfs-utils-builder +FROM local-image/java-devel AS hdfs-utils-builder -ARG HDFS_UTILS -ARG PRODUCT -ARG RELEASE +ARG HDFS_UTILS_VERSION +ARG PRODUCT_VERSION +ARG RELEASE_VERSION ARG STACKABLE_USER_UID -ARG HADOOP_HADOOP +ARG HADOOP_HADOOP_VERSION # Reassign the arg to `HADOOP_VERSION` for better readability. -# It is passed as `HADOOP_HADOOP`, because versions.py has to contain `hadoop/hadoop` to establish a dependency on the Hadoop builder. -# The value of `hadoop/hadoop` is transformed by `bake` and automatically passed as `HADOOP_HADOOP` arg. -ENV HADOOP_VERSION=${HADOOP_HADOOP} +# It is passed as `HADOOP_HADOOP_VERSION`, because versions.py has to contain `hadoop/hadoop` to establish a dependency on the Hadoop builder. +# The value of `hadoop/hadoop` is transformed by `bake` and automatically passed as `HADOOP_HADOOP_VERSION` arg. +ENV HADOOP_VERSION=${HADOOP_HADOOP_VERSION} # Starting with hdfs-utils 0.4.0 we need to use Java 17 for compilation. 
# We can not simply use java-devel with Java 17, as it is also used to compile Hadoop in this @@ -33,7 +33,7 @@ USER ${STACKABLE_USER_UID} WORKDIR /stackable COPY --chown=${STACKABLE_USER_UID}:0 hadoop/hdfs-utils/stackable/patches/patchable.toml /stackable/src/hadoop/hdfs-utils/stackable/patches/patchable.toml -COPY --chown=${STACKABLE_USER_UID}:0 hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS} /stackable/src/hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS} +COPY --chown=${STACKABLE_USER_UID}:0 hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS_VERSION} /stackable/src/hadoop/hdfs-utils/stackable/patches/${HDFS_UTILS_VERSION} COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 /stackable/patched-libs /stackable/patched-libs @@ -42,40 +42,40 @@ COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 /stackable/patched-li # labels to build a rackID from. # Starting with hdfs-utils version 0.3.0 the topology provider is not a standalone jar anymore and included in hdfs-utils. RUN < /stackable/package_manifest.txt +rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE_VERSION}\n" | sort > /stackable/package_manifest.txt chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt chmod g=u /stackable/package_manifest.txt rm -rf /var/cache/yum @@ -119,21 +119,21 @@ rm -rf /var/cache/yum # It is so non-root users (as we are) can mount a FUSE device and let other users access it echo "user_allow_other" > /etc/fuse.conf -ln -s "/stackable/hadoop-${HADOOP_VERSION}-stackable${RELEASE}" /stackable/hadoop +ln -s "/stackable/hadoop-${HADOOP_VERSION}-stackable${RELEASE_VERSION}" /stackable/hadoop # async-profiler ARCH="${TARGETARCH/amd64/x64}" -curl "https://repo.stackable.tech/repository/packages/async-profiler/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}.tar.gz" | tar -xzC /stackable -ln -s "/stackable/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}" /stackable/async-profiler +curl 
"https://repo.stackable.tech/repository/packages/async-profiler/async-profiler-${ASYNC_PROFILER_VERSION}-${TARGETOS}-${ARCH}.tar.gz" | tar -xzC /stackable +ln -s "/stackable/async-profiler-${ASYNC_PROFILER_VERSION}-${TARGETOS}-${ARCH}" /stackable/async-profiler # JMX Exporter -curl "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -chmod -x "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" -ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar +curl "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER_VERSION}.jar" -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER_VERSION}.jar" +chmod -x "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER_VERSION}.jar" +ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER_VERSION}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar # Set correct permissions and ownerships -chown --recursive ${STACKABLE_USER_UID}:0 /stackable/hadoop /stackable/jmx /stackable/async-profiler "/stackable/async-profiler-${ASYNC_PROFILER}-${TARGETOS}-${ARCH}" -chmod --recursive g=u /stackable/jmx /stackable/async-profiler "/stackable/hadoop-${HADOOP_VERSION}-stackable${RELEASE}" +chown --recursive ${STACKABLE_USER_UID}:0 /stackable/hadoop /stackable/jmx /stackable/async-profiler "/stackable/async-profiler-${ASYNC_PROFILER_VERSION}-${TARGETOS}-${ARCH}" +chmod --recursive g=u /stackable/jmx /stackable/async-profiler "/stackable/hadoop-${HADOOP_VERSION}-stackable${RELEASE_VERSION}" # Workaround for https://issues.apache.org/jira/browse/HADOOP-12845 # The problem is that our stackable-devel image does contain the openssl-devel package diff --git a/hadoop/boil-config.toml b/hadoop/boil-config.toml new file mode 100644 index 000000000..b0d0641f2 --- /dev/null +++ b/hadoop/boil-config.toml @@ -0,0 +1,22 
@@ +# Not part of SDP 25.7.0, but still required for hbase, hive, spark-k8s +[versions."3.3.6".local-images] +"hadoop/hadoop" = "3.3.6" +java-base = "11" +java-devel = "11" + +[versions."3.3.6".build-arguments] +async-profiler-version = "2.9" +jmx-exporter-version = "1.3.0" +protobuf-version = "3.7.1" +hdfs-utils-version = "0.4.0" + +[versions."3.4.1".local-images] +"hadoop/hadoop" = "3.4.1" +java-base = "11" +java-devel = "11" + +[versions."3.4.1".build-arguments] +async-profiler-version = "2.9" +jmx-exporter-version = "1.3.0" +protobuf-version = "3.7.1" +hdfs-utils-version = "0.4.1" diff --git a/hadoop/hadoop/Dockerfile b/hadoop/hadoop/Dockerfile index 342eef556..a17feec73 100644 --- a/hadoop/hadoop/Dockerfile +++ b/hadoop/hadoop/Dockerfile @@ -1,17 +1,17 @@ # syntax=docker/dockerfile:1.16.0@sha256:e2dd261f92e4b763d789984f6eab84be66ab4f5f08052316d8eb8f173593acf7 # check=error=true -FROM stackable/image/java-devel AS hadoop-builder +FROM local-image/java-devel AS hadoop-builder -ARG PRODUCT -ARG RELEASE -ARG PROTOBUF +ARG PRODUCT_VERSION +ARG RELEASE_VERSION +ARG PROTOBUF_VERSION ARG STACKABLE_USER_UID WORKDIR /stackable COPY --chown=${STACKABLE_USER_UID}:0 shared/protobuf/stackable/patches/patchable.toml /stackable/src/shared/protobuf/stackable/patches/patchable.toml -COPY --chown=${STACKABLE_USER_UID}:0 shared/protobuf/stackable/patches/${PROTOBUF} /stackable/src/shared/protobuf/stackable/patches/${PROTOBUF} +COPY --chown=${STACKABLE_USER_UID}:0 shared/protobuf/stackable/patches/${PROTOBUF_VERSION} /stackable/src/shared/protobuf/stackable/patches/${PROTOBUF_VERSION} RUN < /stackable/bin/export-snapshot-to-s3 +envsubst '${HBASE_VERSION}:${RELEASE_VERSION}:${LIBS}' < /stackable/bin/export-snapshot-to-s3.env > /stackable/bin/export-snapshot-to-s3 chmod +x /stackable/bin/export-snapshot-to-s3 rm /stackable/bin/export-snapshot-to-s3.env @@ -58,35 +58,35 @@ chmod --recursive g=u /stackable EOF # Final Image -FROM stackable/image/java-base AS final +FROM 
local-image/java-base AS final -ARG PRODUCT -ARG RELEASE -ARG HADOOP_HADOOP +ARG PRODUCT_VERSION +ARG RELEASE_VERSION +ARG HADOOP_HADOOP_VERSION # Reassign the arg to `HADOOP_VERSION` for better readability. -ENV HADOOP_VERSION=${HADOOP_HADOOP} +ENV HADOOP_VERSION=${HADOOP_HADOOP_VERSION} ARG HBASE_PROFILE -ARG HBASE_HBASE +ARG HBASE_HBASE_VERSION # Reassign the arg to `HBASE_VERSION` for better readability. -ENV HBASE_VERSION=${HBASE_HBASE} -ARG HBASE_HBASE_OPERATOR_TOOLS -ARG HBASE_HBASE_OPA_AUTHORIZER -ARG HBASE_PHOENIX +ENV HBASE_VERSION=${HBASE_HBASE_VERSION} +ARG HBASE_HBASE_OPERATOR_TOOLS_VERSION +ARG HBASE_HBASE_OPA_AUTHORIZER_VERSION +ARG HBASE_PHOENIX_VERSION ARG STACKABLE_USER_UID ARG NAME="Apache HBase" ARG DESCRIPTION="This image is deployed by the Stackable Operator for Apache HBase" LABEL name="${NAME}" -LABEL version="${PRODUCT}" -LABEL release="${RELEASE}" +LABEL version="${PRODUCT_VERSION}" +LABEL release="${RELEASE_VERSION}" LABEL summary="The Stackable image for Apache HBase" LABEL description="${DESCRIPTION}" # https://github.com/opencontainers/image-spec/blob/036563a4a268d7c08b51a08f05a02a0fe74c7268/annotations.md#annotations LABEL org.opencontainers.image.documentation="https://docs.stackable.tech/home/stable/hbase/" -LABEL org.opencontainers.image.version="${PRODUCT}" -LABEL org.opencontainers.image.revision="${RELEASE}" +LABEL org.opencontainers.image.version="${PRODUCT_VERSION}" +LABEL org.opencontainers.image.revision="${RELEASE_VERSION}" LABEL org.opencontainers.image.title="${NAME}" LABEL org.opencontainers.image.description="${DESCRIPTION}" @@ -96,17 +96,17 @@ LABEL io.openshift.tags="ubi9,stackable,hbase,sdp,nosql" LABEL io.k8s.description="${DESCRIPTION}" LABEL io.k8s.display-name="${NAME}" -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-builder /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE} /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE}/ -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-builder 
/stackable/hbase-${HBASE_VERSION}-stackable${RELEASE}-src.tar.gz /stackable +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-builder /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION} /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION}/ +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-builder /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz /stackable COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-builder /stackable/async-profiler /stackable/async-profiler/ -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS}-stackable${RELEASE} /stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS}-stackable${RELEASE}/ -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS}-stackable${RELEASE}-src.tar.gz /stackable +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS_VERSION}-stackable${RELEASE_VERSION} /stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS_VERSION}-stackable${RELEASE_VERSION}/ +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz /stackable COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/bin/hbck2 /stackable/bin/hbck2 -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/bin/hbase-entrypoint.sh /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE}/bin/hbase-entrypoint.sh +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-operator-tools /stackable/bin/hbase-entrypoint.sh /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION}/bin/hbase-entrypoint.sh COPY --chown=${STACKABLE_USER_UID}:0 --from=phoenix /stackable/phoenix /stackable/phoenix/ -COPY --chown=${STACKABLE_USER_UID}:0 --from=phoenix 
/stackable/phoenix-${HBASE_PHOENIX}-stackable${RELEASE}-src.tar.gz /stackable +COPY --chown=${STACKABLE_USER_UID}:0 --from=phoenix /stackable/phoenix-${HBASE_PHOENIX_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz /stackable COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-s3-builder /stackable/bin/export-snapshot-to-s3 /stackable/bin/export-snapshot-to-s3 COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-s3-builder /stackable/hadoop/share/hadoop/tools/lib/ /stackable/hadoop/share/hadoop/tools/lib/ @@ -116,11 +116,11 @@ COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-s3-builder /stackable/hadoop/ # hadoop-azure-${HADOOP}.jar contains the AzureBlobFileSystem which is required # by hadoop-common-${HADOOP}.jar if the scheme of a file system is "abfs://". COPY --chown=${STACKABLE_USER_UID}:0 --from=hadoop-builder \ - /stackable/hadoop/share/hadoop/tools/lib/hadoop-azure-${HADOOP_VERSION}-stackable${RELEASE}.jar \ - /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE}/lib/ + /stackable/hadoop/share/hadoop/tools/lib/hadoop-azure-${HADOOP_VERSION}-stackable${RELEASE_VERSION}.jar \ + /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION}/lib/ -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-opa-authorizer /stackable/hbase-opa-authorizer-${HBASE_HBASE_OPA_AUTHORIZER}-src.tar.gz /stackable -COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-opa-authorizer /stackable/hbase-opa-authorizer/target/hbase-opa-authorizer*.jar /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE}/lib +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-opa-authorizer /stackable/hbase-opa-authorizer-${HBASE_HBASE_OPA_AUTHORIZER_VERSION}-src.tar.gz /stackable +COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-opa-authorizer /stackable/hbase-opa-authorizer/target/hbase-opa-authorizer*.jar /stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION}/lib RUN < /stackable/package_manifest.txt +rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE_VERSION}\n" | sort > /stackable/package_manifest.txt 
chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt chmod g=u /stackable/package_manifest.txt rm -rf /var/cache/yum -ln --symbolic --logical --verbose "/stackable/hbase-${HBASE_VERSION}-stackable${RELEASE}" /stackable/hbase +ln --symbolic --logical --verbose "/stackable/hbase-${HBASE_VERSION}-stackable${RELEASE_VERSION}" /stackable/hbase chown --no-dereference ${STACKABLE_USER_UID}:0 /stackable/hbase chmod g=u /stackable/hbase -ln --symbolic --logical --verbose "/stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS}-stackable${RELEASE}" /stackable/hbase-operator-tools +ln --symbolic --logical --verbose "/stackable/hbase-operator-tools-${HBASE_HBASE_OPERATOR_TOOLS_VERSION}-stackable${RELEASE_VERSION}" /stackable/hbase-operator-tools chown --no-dereference ${STACKABLE_USER_UID}:0 /stackable/hbase-operator-tools chmod g=u /stackable/hbase-operator-tools diff --git a/hbase/boil-config.toml b/hbase/boil-config.toml new file mode 100644 index 000000000..2aa8bd793 --- /dev/null +++ b/hbase/boil-config.toml @@ -0,0 +1,25 @@ +[versions."2.6.1".local-images] +"hbase/hbase" = "2.6.1" +"hbase/hbase-operator-tools" = "1.3.0-fd5a5fb-hbase2.6.1" +"hbase/phoenix" = "5.2.1-hbase2.6.1" +"hbase/hbase-opa-authorizer" = "0.1.0" # only for HBase 2.6.1 +"hadoop/hadoop" = "3.3.6" +java-base = "11" +java-devel = "11" + +[versions."2.6.1".build-arguments] +hbase-profile = "2.6" +delete-caches = "true" + +[versions."2.6.2".local-images] +"hbase/hbase" = "2.6.2" +"hbase/hbase-operator-tools" = "1.3.0-fd5a5fb-hbase2.6.2" +"hbase/phoenix" = "5.2.1-hbase2.6.2" +"hbase/hbase-opa-authorizer" = "0.1.0" # only for HBase 2.6.x +"hadoop/hadoop" = "3.4.1" +java-base = "11" +java-devel = "11" + +[versions."2.6.2".build-arguments] +hbase-profile = "2.6" +delete-caches = "true" diff --git a/hbase/hbase-opa-authorizer/Dockerfile b/hbase/hbase-opa-authorizer/Dockerfile index 78f5a7115..c43e4990a 100644 --- a/hbase/hbase-opa-authorizer/Dockerfile +++ b/hbase/hbase-opa-authorizer/Dockerfile 
@@ -1,6 +1,6 @@ -FROM stackable/image/java-devel +FROM local-image/java-devel -ARG PRODUCT +ARG PRODUCT_VERSION ARG DELETE_CACHES ARG STACKABLE_USER_UID @@ -8,17 +8,17 @@ USER ${STACKABLE_USER_UID} WORKDIR /stackable COPY --chown=${STACKABLE_USER_UID}:0 hbase/hbase-opa-authorizer/stackable/patches/patchable.toml /stackable/src/hbase/hbase-opa-authorizer/stackable/patches/patchable.toml -COPY --chown=${STACKABLE_USER_UID}:0 hbase/hbase-opa-authorizer/stackable/patches/${PRODUCT} /stackable/src/hbase/hbase-opa-authorizer/stackable/patches/${PRODUCT} +COPY --chown=${STACKABLE_USER_UID}:0 hbase/hbase-opa-authorizer/stackable/patches/${PRODUCT_VERSION} /stackable/src/hbase/hbase-opa-authorizer/stackable/patches/${PRODUCT_VERSION} RUN --mount=type=cache,id=maven-opa,uid=${STACKABLE_USER_UID},target=/stackable/.m2/repository < /stackable/package_manifest.txt +rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE_VERSION}\n" | sort > /stackable/package_manifest.txt chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt chmod g=u /stackable/package_manifest.txt rm -rf /var/cache/yum -chmod g=u /stackable/apache-hive-metastore-${PRODUCT}-stackable${RELEASE}-bin/bin/start-metastore +chmod g=u /stackable/apache-hive-metastore-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-bin/bin/start-metastore -ln -s /stackable/apache-hive-metastore-${PRODUCT}-stackable${RELEASE}-bin /stackable/hive-metastore +ln -s /stackable/apache-hive-metastore-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-bin /stackable/hive-metastore chown -h ${STACKABLE_USER_UID}:0 /stackable/hive-metastore chmod g=u /stackable/hive-metastore -ln -s /stackable/hadoop-${HADOOP_VERSION}-stackable${RELEASE} /stackable/hadoop +ln -s /stackable/hadoop-${HADOOP_VERSION}-stackable${RELEASE_VERSION} /stackable/hadoop chown -h ${STACKABLE_USER_UID}:0 /stackable/hadoop chmod g=u /stackable/hadoop chmod g=u /stackable/*-src.tar.gz diff --git a/hive/boil-config.toml b/hive/boil-config.toml new file mode 100644 index 
000000000..7bbcca500 --- /dev/null +++ b/hive/boil-config.toml @@ -0,0 +1,38 @@ +[versions."3.1.3".local-images] +# Hive 3 must be built with Java 8 but will run on Java 11 +java-base = "11" +java-devel = "8" +"hadoop/hadoop" = "3.3.6" + +[versions."3.1.3".build-arguments] +jmx-exporter-version = "1.3.0" +# Keep consistent with the dependency from Hadoop: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.3.6 +aws-java-sdk-bundle-version = "1.12.367" +azure-storage-version = "7.0.1" +azure-keyvault-core-version = "1.0.0" + +[versions."4.0.0".local-images] +# Hive 4 must be built with Java 8 (according to GitHub README) but seems to run on Java 11 +java-base = "11" +java-devel = "8" +"hadoop/hadoop" = "3.3.6" + +[versions."4.0.0".build-arguments] +jmx-exporter-version = "1.3.0" +# Keep consistent with the dependency from Hadoop: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.3.6 +aws-java-sdk-bundle-version = "1.12.367" +azure-storage-version = "7.0.1" +azure-keyvault-core-version = "1.0.0" + +[versions."4.0.1".local-images] +# Hive 4 must be built with Java 8 (according to GitHub README) but seems to run on Java 11 +java-base = "11" +java-devel = "8" +"hadoop/hadoop" = "3.3.6" + +[versions."4.0.1".build-arguments] +jmx-exporter-version = "1.3.0" +# Keep consistent with the dependency from Hadoop: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.3.6 +aws-java-sdk-bundle-version = "1.12.367" +azure-storage-version = "7.0.1" +azure-keyvault-core-version = "1.0.0" diff --git a/java-base/Dockerfile b/java-base/Dockerfile index 397587d2f..610eb5a46 100644 --- a/java-base/Dockerfile +++ b/java-base/Dockerfile @@ -4,16 +4,16 @@ # # Provides the common Java Runtime for SDP products # -FROM stackable/image/vector +FROM local-image/vector -ARG PRODUCT -ARG RELEASE="1" +ARG PRODUCT_VERSION +ARG RELEASE_VERSION="1" LABEL name="Stackable image for OpenJDK" \ maintainer="info@stackable.tech" \ vendor="Stackable GmbH" \ - 
version="${PRODUCT}" \ - release="${RELEASE}" \ + version="${PRODUCT_VERSION}" \ + release="${RELEASE_VERSION}" \ summary="The Stackable OpenJDK base image." \ description="This image is the base image for all Stackable Java product images." @@ -32,7 +32,7 @@ EOF RUN microdnf update && \ microdnf install \ # Needed to run Java programs - "temurin-${PRODUCT}-jre" \ + "temurin-${PRODUCT_VERSION}-jre" \ # Needed, because otherwise e.g. Zookeeper fails with # Caused by: java.io.FileNotFoundException: /usr/lib/jvm/java-11-openjdk-11.0.20.0.8-2.el8.x86_64/lib/tzdb.dat (No such file or directory) tzdata-java \ @@ -43,7 +43,7 @@ RUN microdnf update && \ COPY java-base/licenses /licenses -ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT}-jre" +ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT_VERSION}-jre" # This image doesn't include the development packages for Java. # For images that need the devel package (ex. Spark) use this env variable to @@ -51,7 +51,7 @@ ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT}-jre" # # microdnf install java-${JAVA_VERSION}-openjdk-devel # -ENV JAVA_VERSION=$PRODUCT +ENV JAVA_VERSION=$PRODUCT_VERSION # Mitigation for CVE-2021-44228 (Log4Shell) # This variable is supported as of Log4j version 2.10 and diff --git a/java-base/boil-config.toml b/java-base/boil-config.toml new file mode 100644 index 000000000..c7968ebf0 --- /dev/null +++ b/java-base/boil-config.toml @@ -0,0 +1,20 @@ +[versions."8".local-images] +vector = "0.47.0" + +[versions."11".local-images] +vector = "0.47.0" + +[versions."17".local-images] +vector = "0.47.0" + +[versions."21".local-images] +vector = "0.47.0" + +[versions."22".local-images] +vector = "0.47.0" + +[versions."23".local-images] +vector = "0.47.0" + +[versions."24".local-images] +vector = "0.47.0" diff --git a/java-devel/Dockerfile b/java-devel/Dockerfile index 8a4343aed..b88e26498 100644 --- a/java-devel/Dockerfile +++ b/java-devel/Dockerfile @@ -5,9 +5,9 @@ # Base image for builder stages in Java based products # -FROM 
stackable/image/stackable-devel +FROM local-image/stackable-devel -ARG PRODUCT +ARG PRODUCT_VERSION ARG STACKABLE_USER_UID # Find the latest version here: https://github.com/apache/maven @@ -46,7 +46,7 @@ microdnf install \ `# Needed by the maven ant run plugin for the "set-hostname-property" step in zookeeper` \ hostname \ `# Needed for compiling Java projects` \ - "temurin-${PRODUCT}-jdk" \ + "temurin-${PRODUCT_VERSION}-jdk" \ krb5-devel \ libcurl-devel \ make \ @@ -66,7 +66,7 @@ ln -s /opt/maven/bin/mvn /usr/bin/mvn EOF -ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT}-jdk" +ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT_VERSION}-jdk" ENV MAVEN_ARGS="--batch-mode --no-transfer-progress" COPY --chown=${STACKABLE_USER_UID}:0 java-devel/stackable/settings.xml /stackable/.m2/settings.xml diff --git a/java-devel/boil-config.toml b/java-devel/boil-config.toml new file mode 100644 index 000000000..b6d750b42 --- /dev/null +++ b/java-devel/boil-config.toml @@ -0,0 +1,20 @@ +[versions."8".local-images] +stackable-devel = "1.0.0" + +[versions."11".local-images] +stackable-devel = "1.0.0" + +[versions."17".local-images] +stackable-devel = "1.0.0" + +[versions."21".local-images] +stackable-devel = "1.0.0" + +[versions."22".local-images] +stackable-devel = "1.0.0" + +[versions."23".local-images] +stackable-devel = "1.0.0" + +[versions."24".local-images] +stackable-devel = "1.0.0" diff --git a/jdk-base/Dockerfile b/jdk-base/Dockerfile index 776804c1d..914ab6cf9 100644 --- a/jdk-base/Dockerfile +++ b/jdk-base/Dockerfile @@ -4,16 +4,16 @@ # # Provides the common Java Development Kit for SDP products # -FROM stackable/image/vector +FROM local-image/vector -ARG PRODUCT -ARG RELEASE="1" +ARG PRODUCT_VERSION +ARG RELEASE_VERSION="1" LABEL name="Stackable image for OpenJDK" \ maintainer="info@stackable.tech" \ vendor="Stackable GmbH" \ - version="${PRODUCT}" \ - release="${RELEASE}" \ + version="${PRODUCT_VERSION}" \ + release="${RELEASE_VERSION}" \ summary="The Stackable OpenJDK base 
image." \ description="This image is the base image for all Stackable Java product images which require a JDK." @@ -32,7 +32,7 @@ EOF RUN microdnf update && \ microdnf install \ # Needed to run Java programs - "temurin-${PRODUCT}-jdk" \ + "temurin-${PRODUCT_VERSION}-jdk" \ # Needed, because otherwise e.g. Zookeeper fails with # Caused by: java.io.FileNotFoundException: /usr/lib/jvm/java-11-openjdk-11.0.20.0.8-2.el8.x86_64/lib/tzdb.dat (No such file or directory) tzdata-java \ @@ -43,7 +43,7 @@ RUN microdnf update && \ COPY java-base/licenses /licenses -ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT}-jdk" +ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT_VERSION}-jdk" # This image doesn't include the development packages for Java. # For images that need the devel package (ex. Spark) use this env variable to @@ -51,7 +51,7 @@ ENV JAVA_HOME="/usr/lib/jvm/temurin-${PRODUCT}-jdk" # # microdnf install java-${JAVA_VERSION}-openjdk-devel # -ENV JAVA_VERSION=$PRODUCT +ENV JAVA_VERSION=$PRODUCT_VERSION # Mitigation for CVE-2021-44228 (Log4Shell) # This variable is supported as of Log4j version 2.10 and diff --git a/jdk-base/boil-config.toml b/jdk-base/boil-config.toml new file mode 100644 index 000000000..c7968ebf0 --- /dev/null +++ b/jdk-base/boil-config.toml @@ -0,0 +1,20 @@ +[versions."8".local-images] +vector = "0.47.0" + +[versions."11".local-images] +vector = "0.47.0" + +[versions."17".local-images] +vector = "0.47.0" + +[versions."21".local-images] +vector = "0.47.0" + +[versions."22".local-images] +vector = "0.47.0" + +[versions."23".local-images] +vector = "0.47.0" + +[versions."24".local-images] +vector = "0.47.0" diff --git a/kafka-testing-tools/Dockerfile b/kafka-testing-tools/Dockerfile index cbfb4299c..7f1e563d7 100644 --- a/kafka-testing-tools/Dockerfile +++ b/kafka-testing-tools/Dockerfile @@ -1,20 +1,20 @@ # syntax=docker/dockerfile:1.16.0@sha256:e2dd261f92e4b763d789984f6eab84be66ab4f5f08052316d8eb8f173593acf7 # check=error=true -FROM stackable/image/kafka/kcat 
AS kcat +FROM local-image/kafka/kcat AS kcat -FROM stackable/image/stackable-base AS final +FROM local-image/stackable-base AS final -ARG PRODUCT -ARG KAFKA_KCAT -ARG RELEASE +ARG PRODUCT_VERSION +ARG KAFKA_KCAT_VERSION +ARG RELEASE_VERSION ARG STACKABLE_USER_UID LABEL name="Kafka Testing Tools" \ maintainer="info@stackable.tech" \ vendor="Stackable GmbH" \ - version="${PRODUCT}" \ - release="${RELEASE}" \ + version="${PRODUCT_VERSION}" \ + release="${RELEASE_VERSION}" \ summary="The Stackable image for the kcat tool." \ description="Used for integration testing" @@ -30,9 +30,9 @@ RUN microdnf install \ && rm -rf /var/cache/yum # Store kcat version with binary name and add softlink -COPY --chown=${STACKABLE_USER_UID}:0 --from=kcat /stackable/kcat /stackable/kcat-${KAFKA_KCAT} -COPY --chown=${STACKABLE_USER_UID}:0 --from=kcat /stackable/kcat-${KAFKA_KCAT}-src.tar.gz /stackable -RUN ln -s /stackable/kcat-${KAFKA_KCAT} /stackable/kcat +COPY --chown=${STACKABLE_USER_UID}:0 --from=kcat /stackable/kcat /stackable/kcat-${KAFKA_KCAT_VERSION} +COPY --chown=${STACKABLE_USER_UID}:0 --from=kcat /stackable/kcat-${KAFKA_KCAT_VERSION}-src.tar.gz /stackable +RUN ln -s /stackable/kcat-${KAFKA_KCAT_VERSION} /stackable/kcat COPY --chown=${STACKABLE_USER_UID}:0 --from=kcat /licenses /licenses COPY --chown=${STACKABLE_USER_UID}:0 kafka-testing-tools/licenses /licenses diff --git a/kafka-testing-tools/boil-config.toml b/kafka-testing-tools/boil-config.toml new file mode 100644 index 000000000..ca78f8116 --- /dev/null +++ b/kafka-testing-tools/boil-config.toml @@ -0,0 +1,4 @@ +[versions."1.0.0".local-images] +stackable-base = "1.0.0" +"kafka/kcat" = "1.7.0" +java-base = "11" diff --git a/kafka/Dockerfile b/kafka/Dockerfile index e01f09fda..f9dffb9dd 100644 --- a/kafka/Dockerfile +++ b/kafka/Dockerfile @@ -1,15 +1,15 @@ # syntax=docker/dockerfile:1.16.0@sha256:e2dd261f92e4b763d789984f6eab84be66ab4f5f08052316d8eb8f173593acf7 # check=error=true -FROM stackable/image/kafka/kcat AS kcat -FROM 
stackable/image/kafka/kafka-opa-plugin AS kafka-opa-plugin +FROM local-image/kafka/kcat AS kcat +FROM local-image/kafka/kafka-opa-plugin AS kafka-opa-plugin -FROM stackable/image/java-devel AS kafka-builder +FROM local-image/java-devel AS kafka-builder -ARG PRODUCT -ARG RELEASE -ARG SCALA -ARG JMX_EXPORTER +ARG PRODUCT_VERSION +ARG RELEASE_VERSION +ARG SCALA_VERSION +ARG JMX_EXPORTER_VERSION ARG STACKABLE_USER_UID USER ${STACKABLE_USER_UID} @@ -17,15 +17,15 @@ WORKDIR /stackable COPY --chown=${STACKABLE_USER_UID}:0 kafka/stackable/jmx/ /stackable/jmx/ COPY --chown=${STACKABLE_USER_UID}:0 kafka/stackable/patches/patchable.toml /stackable/src/kafka/stackable/patches/patchable.toml -COPY --chown=${STACKABLE_USER_UID}:0 kafka/stackable/patches/${PRODUCT} /stackable/src/kafka/stackable/patches/${PRODUCT} +COPY --chown=${STACKABLE_USER_UID}:0 kafka/stackable/patches/${PRODUCT_VERSION} /stackable/src/kafka/stackable/patches/${PRODUCT_VERSION} RUN < /stackable/package_manifest.txt +rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE_VERSION}\n" | sort > /stackable/package_manifest.txt chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt chmod g=u /stackable/package_manifest.txt rm -rf /var/cache/yum -ln -s /stackable/bin/kcat-${KAFKA_KCAT} /stackable/bin/kcat +ln -s /stackable/bin/kcat-${KAFKA_KCAT_VERSION} /stackable/bin/kcat chown -h ${STACKABLE_USER_UID}:0 /stackable/bin/kcat # kcat was located in /stackable/kcat - legacy ln -s /stackable/bin/kcat /stackable/kcat chown -h ${STACKABLE_USER_UID}:0 /stackable/kcat -ln -s /stackable/kafka_${SCALA}-${PRODUCT}-stackable${RELEASE} /stackable/kafka +ln -s /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION} /stackable/kafka chown -h ${STACKABLE_USER_UID}:0 /stackable/kafka # fix missing permissions chmod g=u /stackable/bin chmod g=u /stackable/jmx -chmod g=u /stackable/kafka_${SCALA}-${PRODUCT}-stackable${RELEASE} -chmod g=u 
/stackable/kafka_${SCALA}-${PRODUCT}-stackable${RELEASE}/libs/opa-authorizer-${KAFKA_KAFKA_OPA_PLUGIN}-all.jar +chmod g=u /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION} +chmod g=u /stackable/kafka_${SCALA_VERSION}-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}/libs/opa-authorizer-${KAFKA_KAFKA_OPA_PLUGIN_VERSION}-all.jar chmod g=u /stackable/*-src.tar.gz EOF diff --git a/kafka/boil-config.toml b/kafka/boil-config.toml new file mode 100644 index 000000000..5347e10e4 --- /dev/null +++ b/kafka/boil-config.toml @@ -0,0 +1,39 @@ +[versions."3.7.2".local-images] +java-base = "21" +java-devel = "21" +"kafka/kcat" = "1.7.0" +"kafka/kafka-opa-plugin" = "1.5.1" + +[versions."3.7.2".build-arguments] +scala-version = "2.13" +jmx-exporter-version = "1.3.0" + +[versions."3.9.0".local-images] +java-base = "21" +java-devel = "21" +"kafka/kcat" = "1.7.0" +"kafka/kafka-opa-plugin" = "1.5.1" + +[versions."3.9.0".build-arguments] +scala-version = "2.13" +jmx-exporter-version = "1.3.0" + +[versions."3.9.1".local-images] +java-base = "21" +java-devel = "21" +"kafka/kcat" = "1.7.0" +"kafka/kafka-opa-plugin" = "1.5.1" + +[versions."3.9.1".build-arguments] +scala-version = "2.13" +jmx-exporter-version = "1.3.0" + +[versions."4.0.0".local-images] +java-base = "21" +java-devel = "21" +"kafka/kcat" = "1.7.0" +"kafka/kafka-opa-plugin" = "1.5.1" + +[versions."4.0.0".build-arguments] +scala-version = "2.13" +jmx-exporter-version = "1.3.0" diff --git a/kafka/kafka-opa-plugin/Dockerfile b/kafka/kafka-opa-plugin/Dockerfile index 2a5df3166..f963a2cb3 100644 --- a/kafka/kafka-opa-plugin/Dockerfile +++ b/kafka/kafka-opa-plugin/Dockerfile @@ -1,22 +1,22 @@ # syntax=docker/dockerfile:1.16.0@sha256:e2dd261f92e4b763d789984f6eab84be66ab4f5f08052316d8eb8f173593acf7 # check=error=true -FROM stackable/image/java-devel +FROM local-image/java-devel -ARG PRODUCT +ARG PRODUCT_VERSION ARG STACKABLE_USER_UID USER ${STACKABLE_USER_UID} WORKDIR /stackable COPY 
--chown=${STACKABLE_USER_UID}:0 kafka/kafka-opa-plugin/stackable/patches/patchable.toml /stackable/src/kafka/kafka-opa-plugin/stackable/patches/patchable.toml -COPY --chown=${STACKABLE_USER_UID}:0 kafka/kafka-opa-plugin/stackable/patches/${PRODUCT} /stackable/src/kafka/kafka-opa-plugin/stackable/patches/${PRODUCT} +COPY --chown=${STACKABLE_USER_UID}:0 kafka/kafka-opa-plugin/stackable/patches/${PRODUCT_VERSION} /stackable/src/kafka/kafka-opa-plugin/stackable/patches/${PRODUCT_VERSION} RUN <= 1.23.1) go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@v1.7.0 -cd "$(/stackable/patchable --images-repo-root=src checkout opa ${PRODUCT})" +cd "$(/stackable/patchable --images-repo-root=src checkout opa ${PRODUCT_VERSION})" -ORIGINAL_VERSION=${PRODUCT} -NEW_VERSION="${PRODUCT}-stackable${RELEASE}" +ORIGINAL_VERSION=${PRODUCT_VERSION} +NEW_VERSION="${PRODUCT_VERSION}-stackable${RELEASE_VERSION}" sed -i 's/var Version = "'${ORIGINAL_VERSION}'"/var Version = "'${NEW_VERSION}'"/g' v1/version/version.go @@ -102,24 +100,24 @@ mv opa /stackable/opa/ chmod -R g=u /stackable/opa /stackable/opa-${NEW_VERSION}-src.tar.gz EOF -FROM stackable/image/vector +FROM local-image/vector -ARG PRODUCT -ARG RELEASE +ARG PRODUCT_VERSION +ARG RELEASE_VERSION ARG STACKABLE_USER_UID LABEL name="Open Policy Agent" \ maintainer="info@stackable.tech" \ vendor="Stackable GmbH" \ - version="${PRODUCT}" \ - release="${RELEASE}" \ + version="${PRODUCT_VERSION}" \ + release="${RELEASE_VERSION}" \ summary="The Stackable image for Open Policy Agent (OPA)." \ description="This image is deployed by the Stackable Operator for OPA." 
COPY --chown=${STACKABLE_USER_UID}:0 opa/licenses /licenses COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa /stackable/opa -COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable/opa-${PRODUCT}-stackable${RELEASE}-src.tar.gz +COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz /stackable/opa-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-src.tar.gz COPY --from=multilog-builder --chown=${STACKABLE_USER_UID}:0 /daemontools/admin/daemontools/command/multilog /stackable/multilog RUN <= 1.23.1) go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@v1.7.0 -curl "https://repo.stackable.tech/repository/packages/statsd_exporter/statsd_exporter-${PRODUCT}.src.tar.gz" | tar -xzC . +curl "https://repo.stackable.tech/repository/packages/statsd_exporter/statsd_exporter-${PRODUCT_VERSION}.src.tar.gz" | tar -xzC . ( - cd "statsd_exporter-${PRODUCT}" || exit + cd "statsd_exporter-${PRODUCT_VERSION}" || exit # Unfortunately, we need to create a dummy Git repository to allow cyclonedx-gomod to determine the version of statsd_exporter git init git config user.email "fake.commiter@stackable.tech" git config user.name "Fake commiter" git commit --allow-empty --message "Fake commit, so that we can create a tag" - git tag "v${PRODUCT}" + git tag "v${PRODUCT_VERSION}" go build -o ../statsd_exporter - $GOPATH/bin/cyclonedx-gomod app -json -output-version 1.5 -output ../statsd_exporter-${PRODUCT}.cdx.json -packages -files + $GOPATH/bin/cyclonedx-gomod app -json -output-version 1.5 -output ../statsd_exporter-${PRODUCT_VERSION}.cdx.json -packages -files ) -rm -rf "statsd_exporter-${PRODUCT}" +rm -rf "statsd_exporter-${PRODUCT_VERSION}" EOF diff --git a/shared/statsd-exporter/boil-config.toml b/shared/statsd-exporter/boil-config.toml new file mode 100644 index 000000000..c2f9afcb2 --- /dev/null +++ 
b/shared/statsd-exporter/boil-config.toml @@ -0,0 +1,2 @@ +[versions."0.28.0".local-images] +stackable-base = "1.0.0" diff --git a/spark-connect-client/Dockerfile b/spark-connect-client/Dockerfile index 4750a071e..fd2c9e950 100644 --- a/spark-connect-client/Dockerfile +++ b/spark-connect-client/Dockerfile @@ -1,18 +1,18 @@ # syntax=docker/dockerfile:1.16.0@sha256:e2dd261f92e4b763d789984f6eab84be66ab4f5f08052316d8eb8f173593acf7 # spark-builder: provides client libs for spark-connect -FROM stackable/image/spark-k8s AS spark-builder +FROM local-image/spark-k8s AS spark-builder -ARG PRODUCT -ARG PYTHON -ARG RELEASE +ARG PRODUCT_VERSION +ARG PYTHON_VERSION +ARG RELEASE_VERSION ARG STACKABLE_USER_UID LABEL name="Stackable Spark Connect Examples" \ maintainer="info@stackable.tech" \ vendor="Stackable GmbH" \ - version="${PRODUCT}" \ - release="${RELEASE}" \ + version="${PRODUCT_VERSION}" \ + release="${RELEASE_VERSION}" \ summary="Spark Connect Examples" \ description="Spark Connect client libraries for Python and the JVM, including some examples." @@ -26,7 +26,7 @@ RUN < /stackable/package_manifest.txt +rpm -qa --qf "%{NAME}-%{VERSION}-%{RELEASE_VERSION}\n" | sort > /stackable/package_manifest.txt chown ${STACKABLE_USER_UID}:0 /stackable/package_manifest.txt chmod g=u /stackable/package_manifest.txt rm -rf /var/cache/yum -# Add link pointing from /stackable/zookeeper to /stackable/apache-zookeeper-${PRODUCT}-stackable${RELEASE}-bin/ +# Add link pointing from /stackable/zookeeper to /stackable/apache-zookeeper-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-bin/ # to preserve the folder name with the version. 
-ln -s /stackable/apache-zookeeper-${PRODUCT}-stackable${RELEASE}-bin/ /stackable/zookeeper +ln -s /stackable/apache-zookeeper-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-bin/ /stackable/zookeeper chown -h ${STACKABLE_USER_UID}:0 /stackable/zookeeper # fix missing permissions chmod g=u /stackable/jmx -chmod g=u /stackable/apache-zookeeper-${PRODUCT}-stackable${RELEASE}-bin/ +chmod g=u /stackable/apache-zookeeper-${PRODUCT_VERSION}-stackable${RELEASE_VERSION}-bin/ EOF # ---------------------------------------- diff --git a/zookeeper/boil-config.toml b/zookeeper/boil-config.toml new file mode 100644 index 000000000..64ab0416d --- /dev/null +++ b/zookeeper/boil-config.toml @@ -0,0 +1,6 @@ +[versions."3.9.3".local-images] +java-base = "17" +java-devel = "11" + +[versions."3.9.3".build-arguments] +jmx-exporter-version = "1.3.0" From 19738fd5434f3b558cdcc7ac1b792a9cb8849d92 Mon Sep 17 00:00:00 2001 From: Techassi Date: Tue, 12 Aug 2025 15:48:34 +0200 Subject: [PATCH 3/9] chore: Remove old Python config files --- airflow/versions.py | 54 ----------- conf.py | 128 ------------------------- druid/versions.py | 26 ----- hadoop/hadoop/versions.py | 13 --- hadoop/versions.py | 21 ---- hbase/hbase-opa-authorizer/versions.py | 7 -- hbase/hbase-operator-tools/versions.py | 20 ---- hbase/hbase/versions.py | 20 ---- hbase/phoenix/versions.py | 20 ---- hbase/versions.py | 28 ------ hive/versions.py | 38 -------- java-base/versions.py | 30 ------ java-devel/versions.py | 30 ------ jdk-base/versions.py | 30 ------ kafka-testing-tools/versions.py | 8 -- kafka/kafka-opa-plugin/versions.py | 6 -- kafka/kcat/versions.py | 7 -- kafka/versions.py | 38 -------- krb5/versions.py | 5 - nifi/versions.py | 24 ----- omid/versions.py | 14 --- opa/versions.py | 14 --- opensearch/security-plugin/versions.py | 6 -- opensearch/versions.py | 8 -- shared/statsd-exporter/versions.py | 6 -- spark-connect-client/versions.py | 8 -- spark-k8s/versions.py | 38 -------- stackable-base/versions.py | 6 -- 
stackable-devel/versions.py | 5 - superset/versions.py | 32 ------- testing-tools/versions.py | 6 -- tools/versions.py | 9 -- trino-cli/versions.py | 8 -- trino/storage-connector/versions.py | 17 ---- trino/trino/versions.py | 14 --- trino/versions.py | 23 ----- vector/versions.py | 8 -- zookeeper/versions.py | 12 --- 38 files changed, 787 deletions(-) delete mode 100644 airflow/versions.py delete mode 100644 conf.py delete mode 100644 druid/versions.py delete mode 100644 hadoop/hadoop/versions.py delete mode 100644 hadoop/versions.py delete mode 100644 hbase/hbase-opa-authorizer/versions.py delete mode 100644 hbase/hbase-operator-tools/versions.py delete mode 100644 hbase/hbase/versions.py delete mode 100644 hbase/phoenix/versions.py delete mode 100644 hbase/versions.py delete mode 100644 hive/versions.py delete mode 100644 java-base/versions.py delete mode 100644 java-devel/versions.py delete mode 100644 jdk-base/versions.py delete mode 100644 kafka-testing-tools/versions.py delete mode 100644 kafka/kafka-opa-plugin/versions.py delete mode 100644 kafka/kcat/versions.py delete mode 100644 kafka/versions.py delete mode 100644 krb5/versions.py delete mode 100644 nifi/versions.py delete mode 100644 omid/versions.py delete mode 100644 opa/versions.py delete mode 100644 opensearch/security-plugin/versions.py delete mode 100644 opensearch/versions.py delete mode 100644 shared/statsd-exporter/versions.py delete mode 100644 spark-connect-client/versions.py delete mode 100644 spark-k8s/versions.py delete mode 100644 stackable-base/versions.py delete mode 100644 stackable-devel/versions.py delete mode 100644 superset/versions.py delete mode 100644 testing-tools/versions.py delete mode 100644 tools/versions.py delete mode 100644 trino-cli/versions.py delete mode 100644 trino/storage-connector/versions.py delete mode 100644 trino/trino/versions.py delete mode 100644 trino/versions.py delete mode 100644 vector/versions.py delete mode 100644 zookeeper/versions.py diff --git 
a/airflow/versions.py b/airflow/versions.py deleted file mode 100644 index 9788be818..000000000 --- a/airflow/versions.py +++ /dev/null @@ -1,54 +0,0 @@ -versions = [ - { - "product": "2.9.3", - "python": "3.9", - "git_sync": "v4.4.1", - "s3fs": "2024.9.0", - "cyclonedx_bom": "6.0.0", - "shared/statsd-exporter": "0.28.0", - "tini": "0.19.0", - "vector": "0.47.0", - "uv": "0.7.8", - "extras": "async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,google_auth,microsoft.azure,odbc,pandas,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv,trino", - "opa_auth_manager": "airflow-2", - }, - { - "product": "2.10.4", - "python": "3.12", - "git_sync": "v4.4.1", - "s3fs": "2024.9.0", - "cyclonedx_bom": "6.0.0", - "shared/statsd-exporter": "0.28.0", - "tini": "0.19.0", - "vector": "0.47.0", - "uv": "0.7.8", - "extras": "async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,google_auth,microsoft.azure,odbc,pandas,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv,trino", - "opa_auth_manager": "airflow-2", - }, - { - "product": "2.10.5", - "python": "3.12", - "git_sync": "v4.4.1", - "s3fs": "2024.9.0", - "cyclonedx_bom": "6.0.0", - "shared/statsd-exporter": "0.28.0", - "tini": "0.19.0", - "vector": "0.47.0", - "uv": "0.7.8", - "extras": "async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,google_auth,microsoft.azure,odbc,pandas,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv,trino", - "opa_auth_manager": "airflow-2", - }, - { - "product": "3.0.1", - "python": "3.12", - "git_sync": "v4.4.1", - "s3fs": "2024.9.0", - "cyclonedx_bom": "6.0.0", - "shared/statsd-exporter": "0.28.0", - "tini": "0.19.0", - "vector": "0.47.0", - "uv": "0.7.8", - "extras": "async,amazon,celery,cncf-kubernetes,docker,elasticsearch,fab,ftp,grpc,hashicorp,http,ldap,google,microsoft-azure,odbc,pandas,postgres,redis,sendgrid,sftp,slack,ssh,statsd,trino", - 
"opa_auth_manager": "airflow-3", - }, -] diff --git a/conf.py b/conf.py deleted file mode 100644 index fa4c83017..000000000 --- a/conf.py +++ /dev/null @@ -1,128 +0,0 @@ -""" -Configuration file for the Stackable image-tools: https://github.com/stackabletech/image-tools. - -Application images will be created for products and associated versions configured here. -""" - -# NOTE: The .scripts/enumerate-product-versions.py script (used in the release workflow as of 2024-07-23) imports this file and it relies on conf.py being in its parent folder. Should this file be moved or the structure changed in any way remember to update that script as well! - -# NOTE (@NickLarsenNZ): Unfortunately, some directories have hyphens, so they need -# importing in a special way. For consistency, we'll do them all the same way. -import importlib - -airflow = importlib.import_module("airflow.versions") -druid = importlib.import_module("druid.versions") -hadoop = importlib.import_module("hadoop.versions") -hadoop_jars = importlib.import_module("hadoop.hadoop.versions") -hbase = importlib.import_module("hbase.versions") -hbase_jars = importlib.import_module("hbase.hbase.versions") -hbase_phoenix = importlib.import_module("hbase.phoenix.versions") -hbase_opa_authorizer = importlib.import_module("hbase.hbase-opa-authorizer.versions") -hbase_operator_tools = importlib.import_module("hbase.hbase-operator-tools.versions") -hive = importlib.import_module("hive.versions") -java_base = importlib.import_module("java-base.versions") -java_devel = importlib.import_module("java-devel.versions") -jdk_base = importlib.import_module("jdk-base.versions") -kafka = importlib.import_module("kafka.versions") -krb5 = importlib.import_module("krb5.versions") -vector = importlib.import_module("vector.versions") -nifi = importlib.import_module("nifi.versions") -omid = importlib.import_module("omid.versions") -opa = importlib.import_module("opa.versions") -opensearch = importlib.import_module("opensearch.versions") 
-opensearch_security_plugin = importlib.import_module( - "opensearch.security-plugin.versions" -) -spark_k8s = importlib.import_module("spark-k8s.versions") -stackable_base = importlib.import_module("stackable-base.versions") -stackable_devel = importlib.import_module("stackable-devel.versions") -superset = importlib.import_module("superset.versions") -trino_cli = importlib.import_module("trino-cli.versions") -trino = importlib.import_module("trino.versions") -trino_jars = importlib.import_module("trino.trino.versions") -trino_storage_connector = importlib.import_module("trino.storage-connector.versions") -kafka_testing_tools = importlib.import_module("kafka-testing-tools.versions") -kcat = importlib.import_module("kafka.kcat.versions") -kafka_opa_plugin = importlib.import_module("kafka.kafka-opa-plugin.versions") -testing_tools = importlib.import_module("testing-tools.versions") -zookeeper = importlib.import_module("zookeeper.versions") -tools = importlib.import_module("tools.versions") -statsd_exporter = importlib.import_module("shared.statsd-exporter.versions") -spark_connect_client = importlib.import_module("spark-connect-client.versions") - -products = [ - {"name": "airflow", "versions": airflow.versions}, - {"name": "druid", "versions": druid.versions}, - {"name": "hadoop", "versions": hadoop.versions}, - {"name": "hadoop/hadoop", "versions": hadoop_jars.versions}, - {"name": "hbase", "versions": hbase.versions}, - {"name": "hbase/hbase", "versions": hbase_jars.versions}, - {"name": "hbase/phoenix", "versions": hbase_phoenix.versions}, - {"name": "hbase/hbase-opa-authorizer", "versions": hbase_opa_authorizer.versions}, - {"name": "hbase/hbase-operator-tools", "versions": hbase_operator_tools.versions}, - {"name": "hive", "versions": hive.versions}, - {"name": "java-base", "versions": java_base.versions}, - {"name": "java-devel", "versions": java_devel.versions}, - {"name": "jdk-base", "versions": jdk_base.versions}, - {"name": "kafka", "versions": 
kafka.versions}, - {"name": "krb5", "versions": krb5.versions}, - {"name": "vector", "versions": vector.versions}, - {"name": "nifi", "versions": nifi.versions}, - {"name": "omid", "versions": omid.versions}, - {"name": "opa", "versions": opa.versions}, - {"name": "opensearch", "versions": opensearch.versions}, - { - "name": "opensearch/security-plugin", - "versions": opensearch_security_plugin.versions, - }, - {"name": "spark-k8s", "versions": spark_k8s.versions}, - {"name": "stackable-base", "versions": stackable_base.versions}, - {"name": "stackable-devel", "versions": stackable_devel.versions}, - {"name": "superset", "versions": superset.versions}, - {"name": "trino-cli", "versions": trino_cli.versions}, - {"name": "trino", "versions": trino.versions}, - {"name": "trino/trino", "versions": trino_jars.versions}, - {"name": "trino/storage-connector", "versions": trino_storage_connector.versions}, - {"name": "kafka-testing-tools", "versions": kafka_testing_tools.versions}, - {"name": "kafka/kcat", "versions": kcat.versions}, - {"name": "kafka/kafka-opa-plugin", "versions": kafka_opa_plugin.versions}, - {"name": "testing-tools", "versions": testing_tools.versions}, - {"name": "zookeeper", "versions": zookeeper.versions}, - {"name": "tools", "versions": tools.versions}, - {"name": "shared/statsd-exporter", "versions": statsd_exporter.versions}, - {"name": "spark-connect-client", "versions": spark_connect_client.versions}, -] - -open_shift_projects = { - "airflow": {"id": "62613f498ccb9938ba3cfde6"}, - "druid": {"id": "626140028ccb9938ba3cfde7"}, - "hadoop": {"id": "6261407f887d6e0b8614660c"}, - "hbase": {"id": "62614109992bac3f9a4a24b8"}, - "hive": {"id": "626140806812078a392dceaa"}, - "kafka": {"id": "625ff25b91bdcd4b49c823a4"}, - "nifi": {"id": "625586a32e9e14bc8118e203"}, - "opa": {"id": "6255838bea1feb8bec4aaaa3"}, - "opensearch": {"id": "6880fe690db664aa303d3a28"}, - "spark-k8s": {"id": "62613e81f8ce82a2f247dda5"}, - "superset": {"id": 
"62557e5fea1feb8bec4aaaa0"}, - "tools": {"id": "62557cd575ab7e30884aaaa0"}, - "trino": {"id": "62557c4a0030f6483318e203"}, - "zookeeper": {"id": "62552b0aadd9d54d56cda11d"}, -} - -cache = [ - { - "type": "registry", - "ref_prefix": "build-repo.stackable.tech:8083/sandbox/cache", - "mode": "max", - "compression": "zstd", - "ignore-error": "true", - }, -] - -args = { - "STACKABLE_USER_NAME": "stackable", - "STACKABLE_USER_UID": "1000", - "STACKABLE_USER_GID": "1000", - "DELETE_CACHES": "true", -} diff --git a/druid/versions.py b/druid/versions.py deleted file mode 100644 index e380f7c81..000000000 --- a/druid/versions.py +++ /dev/null @@ -1,26 +0,0 @@ -versions = [ - { - "product": "30.0.1", - # https://druid.apache.org/docs/30.0.1/operations/java/ - "java-base": "17", - "java-devel": "17", - "hadoop/hadoop": "3.3.6", - "authorizer": "0.7.0", - }, - { - "product": "31.0.1", - # https://druid.apache.org/docs/31.0.1/operations/java/ - "java-base": "17", - "java-devel": "17", - "hadoop/hadoop": "3.3.6", - "authorizer": "0.7.0", - }, - { - "product": "33.0.0", - # https://druid.apache.org/docs/33.0.0/operations/java/ - "java-base": "17", - "java-devel": "17", - "hadoop/hadoop": "3.3.6", - "authorizer": "0.7.0", - }, -] diff --git a/hadoop/hadoop/versions.py b/hadoop/hadoop/versions.py deleted file mode 100644 index d6907a86f..000000000 --- a/hadoop/hadoop/versions.py +++ /dev/null @@ -1,13 +0,0 @@ -versions = [ - { - # Not part of SDP 25.7.0, but still required for hbase, hive, spark-k8s - "product": "3.3.6", - "java-devel": "11", - "protobuf": "3.7.1", - }, - { - "product": "3.4.1", - "java-devel": "11", - "protobuf": "3.7.1", - }, -] diff --git a/hadoop/versions.py b/hadoop/versions.py deleted file mode 100644 index e88be1aca..000000000 --- a/hadoop/versions.py +++ /dev/null @@ -1,21 +0,0 @@ -versions = [ - { - # Not part of SDP 25.7.0, but still required for hbase, hive, spark-k8s - "product": "3.3.6", - "hadoop/hadoop": "3.3.6", - "java-base": "11", - "java-devel": 
"11", - "async_profiler": "2.9", - "jmx_exporter": "1.3.0", - "hdfs_utils": "0.4.0", - }, - { - "product": "3.4.1", - "hadoop/hadoop": "3.4.1", - "java-base": "11", - "java-devel": "11", - "async_profiler": "2.9", - "jmx_exporter": "1.3.0", - "hdfs_utils": "0.4.1", - }, -] diff --git a/hbase/hbase-opa-authorizer/versions.py b/hbase/hbase-opa-authorizer/versions.py deleted file mode 100644 index cd1f2934f..000000000 --- a/hbase/hbase-opa-authorizer/versions.py +++ /dev/null @@ -1,7 +0,0 @@ -versions = [ - { - "product": "0.1.0", - "java-devel": "11", - "delete_caches": "true", - }, -] diff --git a/hbase/hbase-operator-tools/versions.py b/hbase/hbase-operator-tools/versions.py deleted file mode 100644 index 248ecd343..000000000 --- a/hbase/hbase-operator-tools/versions.py +++ /dev/null @@ -1,20 +0,0 @@ -versions = [ - { - "product": "1.3.0-fd5a5fb-hbase2.6.1", - "hbase_operator_tools_version": "1.3.0-fd5a5fb", - "hadoop/hadoop": "3.3.6", - "hbase_thirdparty": "4.1.9", - "hbase/hbase": "2.6.1", - "java-devel": "11", - "delete_caches": "true", - }, - { - "product": "1.3.0-fd5a5fb-hbase2.6.2", - "hbase_operator_tools_version": "1.3.0-fd5a5fb", - "hadoop/hadoop": "3.4.1", - "hbase_thirdparty": "4.1.9", - "hbase/hbase": "2.6.2", - "java-devel": "11", - "delete_caches": "true", - }, -] diff --git a/hbase/hbase/versions.py b/hbase/hbase/versions.py deleted file mode 100644 index 205cedacb..000000000 --- a/hbase/hbase/versions.py +++ /dev/null @@ -1,20 +0,0 @@ -versions = [ - # Also do not merge java-base with java below as "JAVA-BASE is not a valid identifier" in Dockerfiles, it's unfortunate but to fix this would require a bigger refactoring of names or the image tools - # hbase-thirdparty is used to build the hbase-operator-tools and should be set to the version defined in the POM of HBase. 
- { - "product": "2.6.1", - "hadoop/hadoop": "3.3.6", - "java-base": "11", - "java-devel": "11", - "async_profiler": "2.9", - "delete_caches": "true", - }, - { - "product": "2.6.2", - "hadoop/hadoop": "3.4.1", - "java-base": "11", - "java-devel": "11", - "async_profiler": "2.9", - "delete_caches": "true", - }, -] diff --git a/hbase/phoenix/versions.py b/hbase/phoenix/versions.py deleted file mode 100644 index ed0e304e2..000000000 --- a/hbase/phoenix/versions.py +++ /dev/null @@ -1,20 +0,0 @@ -versions = [ - { - "product": "5.2.1-hbase2.6.1", - "phoenix_version": "5.2.1", - "hbase/hbase": "2.6.1", - "hadoop/hadoop": "3.3.6", - "java-devel": "11", - "hbase_profile": "2.6", - "delete_caches": "true", - }, - { - "product": "5.2.1-hbase2.6.2", - "phoenix_version": "5.2.1", - "hbase/hbase": "2.6.2", - "hadoop/hadoop": "3.4.1", - "java-devel": "11", - "hbase_profile": "2.6", - "delete_caches": "true", - }, -] diff --git a/hbase/versions.py b/hbase/versions.py deleted file mode 100644 index 3ea6dcdbf..000000000 --- a/hbase/versions.py +++ /dev/null @@ -1,28 +0,0 @@ -versions = [ - # Also do not merge java-base with java below as "JAVA-BASE is not a valid identifier" in Dockerfiles, it's unfortunate but to fix this would require a bigger refactoring of names or the image tools - # hbase-thirdparty is used to build the hbase-operator-tools and should be set to the version defined in the POM of HBase. 
- { - "product": "2.6.1", - "hbase/hbase": "2.6.1", - "hbase/hbase-operator-tools": "1.3.0-fd5a5fb-hbase2.6.1", - "hbase/phoenix": "5.2.1-hbase2.6.1", - "hbase/hbase-opa-authorizer": "0.1.0", # only for HBase 2.6.1 - "hadoop/hadoop": "3.3.6", - "java-base": "11", - "java-devel": "11", - "hbase_profile": "2.6", - "delete_caches": "true", - }, - { - "product": "2.6.2", - "hbase/hbase": "2.6.2", - "hbase/hbase-operator-tools": "1.3.0-fd5a5fb-hbase2.6.2", - "hbase/phoenix": "5.2.1-hbase2.6.2", - "hbase/hbase-opa-authorizer": "0.1.0", # only for HBase 2.6.1 - "hadoop/hadoop": "3.4.1", - "java-base": "11", - "java-devel": "11", - "hbase_profile": "2.6", - "delete_caches": "true", - }, -] diff --git a/hive/versions.py b/hive/versions.py deleted file mode 100644 index d28fa84be..000000000 --- a/hive/versions.py +++ /dev/null @@ -1,38 +0,0 @@ -versions = [ - { - "product": "3.1.3", - "jmx_exporter": "1.3.0", - # Hive 3 must be built with Java 8 but will run on Java 11 - "java-base": "11", - "java-devel": "8", - "hadoop/hadoop": "3.3.6", - # Keep consistent with the dependency from Hadoop: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.3.6 - "aws_java_sdk_bundle": "1.12.367", - "azure_storage": "7.0.1", - "azure_keyvault_core": "1.0.0", - }, - { - "product": "4.0.0", - "jmx_exporter": "1.3.0", - # Hive 4 must be built with Java 8 (according to GitHub README) but seems to run on Java 11 - "java-base": "11", - "java-devel": "8", - "hadoop/hadoop": "3.3.6", - # Keep consistent with the dependency from Hadoop: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.3.6 - "aws_java_sdk_bundle": "1.12.367", - "azure_storage": "7.0.1", - "azure_keyvault_core": "1.0.0", - }, - { - "product": "4.0.1", - "jmx_exporter": "1.3.0", - # Hive 4 must be built with Java 8 (according to GitHub README) but seems to run on Java 11 - "java-base": "11", - "java-devel": "8", - "hadoop/hadoop": "3.3.6", - # Keep consistent with the dependency from Hadoop: 
https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.3.6 - "aws_java_sdk_bundle": "1.12.367", - "azure_storage": "7.0.1", - "azure_keyvault_core": "1.0.0", - }, -] diff --git a/java-base/versions.py b/java-base/versions.py deleted file mode 100644 index 036d18203..000000000 --- a/java-base/versions.py +++ /dev/null @@ -1,30 +0,0 @@ -versions = [ - { - "product": "8", - "vector": "0.47.0", - }, - { - "product": "11", - "vector": "0.47.0", - }, - { - "product": "17", - "vector": "0.47.0", - }, - { - "product": "21", - "vector": "0.47.0", - }, - { - "product": "22", - "vector": "0.47.0", - }, - { - "product": "23", - "vector": "0.47.0", - }, - { - "product": "24", - "vector": "0.47.0", - }, -] diff --git a/java-devel/versions.py b/java-devel/versions.py deleted file mode 100644 index cd0838e4f..000000000 --- a/java-devel/versions.py +++ /dev/null @@ -1,30 +0,0 @@ -versions = [ - { - "product": "8", - "stackable-devel": "1.0.0", - }, - { - "product": "11", - "stackable-devel": "1.0.0", - }, - { - "product": "17", - "stackable-devel": "1.0.0", - }, - { - "product": "21", - "stackable-devel": "1.0.0", - }, - { - "product": "22", - "stackable-devel": "1.0.0", - }, - { - "product": "23", - "stackable-devel": "1.0.0", - }, - { - "product": "24", - "stackable-devel": "1.0.0", - }, -] diff --git a/jdk-base/versions.py b/jdk-base/versions.py deleted file mode 100644 index 036d18203..000000000 --- a/jdk-base/versions.py +++ /dev/null @@ -1,30 +0,0 @@ -versions = [ - { - "product": "8", - "vector": "0.47.0", - }, - { - "product": "11", - "vector": "0.47.0", - }, - { - "product": "17", - "vector": "0.47.0", - }, - { - "product": "21", - "vector": "0.47.0", - }, - { - "product": "22", - "vector": "0.47.0", - }, - { - "product": "23", - "vector": "0.47.0", - }, - { - "product": "24", - "vector": "0.47.0", - }, -] diff --git a/kafka-testing-tools/versions.py b/kafka-testing-tools/versions.py deleted file mode 100644 index b706d446e..000000000 --- 
a/kafka-testing-tools/versions.py +++ /dev/null @@ -1,8 +0,0 @@ -versions = [ - { - "product": "1.0.0", - "kafka/kcat": "1.7.0", - "java-base": "11", - "stackable-base": "1.0.0", - } -] diff --git a/kafka/kafka-opa-plugin/versions.py b/kafka/kafka-opa-plugin/versions.py deleted file mode 100644 index 602909cd2..000000000 --- a/kafka/kafka-opa-plugin/versions.py +++ /dev/null @@ -1,6 +0,0 @@ -versions = [ - { - "product": "1.5.1", - "java-devel": "11", - }, -] diff --git a/kafka/kcat/versions.py b/kafka/kcat/versions.py deleted file mode 100644 index 80f708acd..000000000 --- a/kafka/kcat/versions.py +++ /dev/null @@ -1,7 +0,0 @@ -versions = [ - { - "product": "1.7.0", - "java-devel": "11", - "stackable-devel": "1.0.0", - } -] diff --git a/kafka/versions.py b/kafka/versions.py deleted file mode 100644 index bf16adadb..000000000 --- a/kafka/versions.py +++ /dev/null @@ -1,38 +0,0 @@ -versions = [ - { - "product": "3.7.2", - "java-base": "21", - "java-devel": "21", - "scala": "2.13", - "kafka/kcat": "1.7.0", - "kafka/kafka-opa-plugin": "1.5.1", - "jmx_exporter": "1.3.0", - }, - { - "product": "3.9.0", - "java-base": "21", - "java-devel": "21", - "scala": "2.13", - "kafka/kcat": "1.7.0", - "kafka/kafka-opa-plugin": "1.5.1", - "jmx_exporter": "1.3.0", - }, - { - "product": "3.9.1", - "java-base": "21", - "java-devel": "21", - "scala": "2.13", - "kafka/kcat": "1.7.0", - "kafka/kafka-opa-plugin": "1.5.1", - "jmx_exporter": "1.3.0", - }, - { - "product": "4.0.0", - "java-base": "23", - "java-devel": "23", - "scala": "2.13", - "kafka/kcat": "1.7.0", - "kafka/kafka-opa-plugin": "1.5.1", - "jmx_exporter": "1.3.0", - }, -] diff --git a/krb5/versions.py b/krb5/versions.py deleted file mode 100644 index fcf6ef066..000000000 --- a/krb5/versions.py +++ /dev/null @@ -1,5 +0,0 @@ -versions = [ - { - "product": "1.21.1", - }, -] diff --git a/nifi/versions.py b/nifi/versions.py deleted file mode 100644 index ea9e26360..000000000 --- a/nifi/versions.py +++ /dev/null @@ -1,24 +0,0 @@ 
-versions = [ - { - "product": "1.27.0", - "java-base": "11", - "java-devel": "11", # There is an error when trying to use the jdk 21 (since nifi 1.26.0) - "git_sync": "v4.4.1", - "nifi_opa_authorizer_plugin": "0.1.0", - }, - { - "product": "1.28.1", - "java-base": "11", - "java-devel": "11", - "git_sync": "v4.4.1", - "nifi_opa_authorizer_plugin": "0.1.0", - }, - { - "product": "2.4.0", - "java-base": "21", - "java-devel": "21", - "git_sync": "v4.4.1", - "nifi_iceberg_bundle": "0.0.4", - "nifi_opa_authorizer_plugin": "0.1.0", - }, -] diff --git a/omid/versions.py b/omid/versions.py deleted file mode 100644 index 533cb91be..000000000 --- a/omid/versions.py +++ /dev/null @@ -1,14 +0,0 @@ -versions = [ - { - "product": "1.1.2", - "java-base": "11", - "java-devel": "11", - "jmx_exporter": "1.3.0", - }, - { - "product": "1.1.3", - "java-base": "11", - "java-devel": "11", - "jmx_exporter": "1.3.0", - }, -] diff --git a/opa/versions.py b/opa/versions.py deleted file mode 100644 index 64589586a..000000000 --- a/opa/versions.py +++ /dev/null @@ -1,14 +0,0 @@ -versions = [ - { - "product": "1.4.2", - "vector": "0.47.0", - "golang": "1.23.9", - "stackable-devel": "1.0.0", - }, - { - "product": "1.0.1", - "vector": "0.47.0", - "golang": "1.23.9", - "stackable-devel": "1.0.0", - }, -] diff --git a/opensearch/security-plugin/versions.py b/opensearch/security-plugin/versions.py deleted file mode 100644 index a46fa4c9f..000000000 --- a/opensearch/security-plugin/versions.py +++ /dev/null @@ -1,6 +0,0 @@ -versions = [ - { - "product": "3.1.0.0", - "java-devel": "21", - }, -] diff --git a/opensearch/versions.py b/opensearch/versions.py deleted file mode 100644 index 708e430d7..000000000 --- a/opensearch/versions.py +++ /dev/null @@ -1,8 +0,0 @@ -versions = [ - { - "product": "3.1.0", - "java-devel": "21", - "jdk-base": "21", - "opensearch/security-plugin": "3.1.0.0", - }, -] diff --git a/shared/statsd-exporter/versions.py b/shared/statsd-exporter/versions.py deleted file mode 100644 
index adb26f18a..000000000 --- a/shared/statsd-exporter/versions.py +++ /dev/null @@ -1,6 +0,0 @@ -versions = [ - { - "product": "0.28.0", - "stackable-base": "1.0.0", - } -] diff --git a/spark-connect-client/versions.py b/spark-connect-client/versions.py deleted file mode 100644 index be7977a85..000000000 --- a/spark-connect-client/versions.py +++ /dev/null @@ -1,8 +0,0 @@ -versions = [ - { - "product": "3.5.6", - "spark-k8s": "3.5.6", - "java-base": "17", - "python": "3.11", - }, -] diff --git a/spark-k8s/versions.py b/spark-k8s/versions.py deleted file mode 100644 index fe6405702..000000000 --- a/spark-k8s/versions.py +++ /dev/null @@ -1,38 +0,0 @@ -versions = [ - { - "product": "3.5.5", - "java-base": "17", - "java-devel": "17", - "python": "3.11", - "hadoop/hadoop": "3.4.1", # Current Stackable LTS version. Source of the AWS and Azure artifacts to Spark's classpath. - "hbase": "2.6.2", # Current Stackable LTS version. Used to build the HBase connector. - "aws_java_sdk_bundle": "2.24.6", # https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.4.1 - "azure_storage": "7.0.1", # https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-azure/3.4.1 - "azure_keyvault_core": "1.0.0", # https://mvnrepository.com/artifact/com.microsoft.azure/azure-storage/7.0.1 - "jackson_dataformat_xml": "2.15.2", # https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.13/3.5.2 - "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 - "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 - "vector": "0.47.0", - "jmx_exporter": "1.3.0", - "tini": "0.19.0", - "hbase_connector": "1.0.1", - }, - { - "product": "3.5.6", - "java-base": "17", - "java-devel": "17", - "python": "3.11", - "hadoop/hadoop": "3.4.1", # Current Stackable LTS version. Source of the AWS and Azure artifacts to Spark's classpath. 
- "hbase": "2.6.2", # Current Stackable LTS version. Used to build the HBase connector. - "aws_java_sdk_bundle": "2.24.6", # https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/3.4.1 - "azure_storage": "7.0.1", # https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-azure/3.4.1 - "azure_keyvault_core": "1.0.0", # https://mvnrepository.com/artifact/com.microsoft.azure/azure-storage/7.0.1 - "jackson_dataformat_xml": "2.15.2", # https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.13/3.5.2 - "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 - "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2 - "vector": "0.47.0", - "jmx_exporter": "1.3.0", - "tini": "0.19.0", - "hbase_connector": "1.0.1", - }, -] diff --git a/stackable-base/versions.py b/stackable-base/versions.py deleted file mode 100644 index 6f6bf1ca7..000000000 --- a/stackable-base/versions.py +++ /dev/null @@ -1,6 +0,0 @@ -versions = [ - { - "product": "1.0.0", - "stackable-devel": "1.0.0", - }, -] diff --git a/stackable-devel/versions.py b/stackable-devel/versions.py deleted file mode 100644 index ed192d92f..000000000 --- a/stackable-devel/versions.py +++ /dev/null @@ -1,5 +0,0 @@ -versions = [ - { - "product": "1.0.0", - }, -] diff --git a/superset/versions.py b/superset/versions.py deleted file mode 100644 index 4ef1f3e6c..000000000 --- a/superset/versions.py +++ /dev/null @@ -1,32 +0,0 @@ -versions = [ - { - "product": "4.0.2", - "python": "3.9", - "cyclonedx_bom": "6.0.0", - "vector": "0.47.0", - "shared/statsd-exporter": "0.28.0", - "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.4.1/requirements/extra.txt#L7 - "stackable-base": "1.0.0", - "uv": "0.7.3", - }, - { - "product": "4.1.1", - "python": "3.9", # 3.11 support was merged in January 2025 (two months after 4.1.1 release), 3.10 is not available 
in our UBI image, so we need to stay on 3.9 for now - "cyclonedx_bom": "6.0.0", - "vector": "0.47.0", - "shared/statsd-exporter": "0.28.0", - "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.5.0/requirements/extra.txt#L7 - "stackable-base": "1.0.0", - "uv": "0.7.3", - }, - { - "product": "4.1.2", - "python": "3.9", - "cyclonedx_bom": "6.0.0", - "vector": "0.47.0", - "shared/statsd-exporter": "0.28.0", - "authlib": "1.2.1", # https://github.com/dpgaspar/Flask-AppBuilder/blob/release/4.5.0/requirements/extra.txt#L7 - "stackable-base": "1.0.0", - "uv": "0.7.3", - }, -] diff --git a/testing-tools/versions.py b/testing-tools/versions.py deleted file mode 100644 index 5231af218..000000000 --- a/testing-tools/versions.py +++ /dev/null @@ -1,6 +0,0 @@ -versions = [ - { - "product": "0.2.0", - "keycloak_version": "23.0.0", - } -] diff --git a/tools/versions.py b/tools/versions.py deleted file mode 100644 index 78269452c..000000000 --- a/tools/versions.py +++ /dev/null @@ -1,9 +0,0 @@ -versions = [ - { - "product": "1.0.0", - "kubectl_version": "1.33.0", - "jq_version": "1.7.1", - "stackable-base": "1.0.0", - "yq_version": "4.45.2", - }, -] diff --git a/trino-cli/versions.py b/trino-cli/versions.py deleted file mode 100644 index 828f6f625..000000000 --- a/trino-cli/versions.py +++ /dev/null @@ -1,8 +0,0 @@ -# This image is only used in integration tests and demos. -# It's therefore ok if we only support a single version at a time. 
-versions = [ - { - "product": "476", - "java-base": "24", - }, -] diff --git a/trino/storage-connector/versions.py b/trino/storage-connector/versions.py deleted file mode 100644 index d9fe35c51..000000000 --- a/trino/storage-connector/versions.py +++ /dev/null @@ -1,17 +0,0 @@ -versions = [ - { - "product": "451", - "trino/trino": "451", - "java-devel": "22", - }, - { - "product": "470", - "trino/trino": "470", - "java-devel": "23", - }, - { - "product": "476", - "trino/trino": "476", - "java-devel": "24", - }, -] diff --git a/trino/trino/versions.py b/trino/trino/versions.py deleted file mode 100644 index 3c67dc2b3..000000000 --- a/trino/trino/versions.py +++ /dev/null @@ -1,14 +0,0 @@ -versions = [ - { - "product": "451", - "java-devel": "22", - }, - { - "product": "470", - "java-devel": "23", - }, - { - "product": "476", - "java-devel": "24", - }, -] diff --git a/trino/versions.py b/trino/versions.py deleted file mode 100644 index bb9a9dac8..000000000 --- a/trino/versions.py +++ /dev/null @@ -1,23 +0,0 @@ -versions = [ - { - "product": "451", - "java-base": "22", - "trino/trino": "451", - "jmx_exporter": "1.3.0", - "trino/storage-connector": "451", - }, - { - "product": "470", - "java-base": "23", - "trino/trino": "470", - "jmx_exporter": "1.3.0", - "trino/storage-connector": "470", - }, - { - "product": "476", - "java-base": "24", - "trino/trino": "476", - "jmx_exporter": "1.3.0", - "trino/storage-connector": "476", - }, -] diff --git a/vector/versions.py b/vector/versions.py deleted file mode 100644 index 828d3fc2d..000000000 --- a/vector/versions.py +++ /dev/null @@ -1,8 +0,0 @@ -versions = [ - { - "product": "0.47.0", - "rpm_release": "1", - "stackable-base": "1.0.0", - "inotify_tools": "3.22.1.0-1.el9", - } -] diff --git a/zookeeper/versions.py b/zookeeper/versions.py deleted file mode 100644 index d63bfac5a..000000000 --- a/zookeeper/versions.py +++ /dev/null @@ -1,12 +0,0 @@ -versions = [ - { - "product": "3.9.3", - "java-base": "17", - # NOTE 
(@NickLarsenNZ): Builds fail on Java 17, with the output: - # [ERROR] Failed to execute goal com.github.spotbugs:spotbugs-maven-plugin:4.0.0:spotbugs (spotbugs) on project - # zookeeper: Execution spotbugs of goal com.github.spotbugs:spotbugs-maven-plugin:4.0.0:spotbugs failed: Java - # returned: 1 -> [Help 1] - "java-devel": "11", - "jmx_exporter": "1.3.0", - }, -] From 9a6ab65d8431bf1fe6c61f61d235ed3a09f8f2c4 Mon Sep 17 00:00:00 2001 From: Techassi Date: Tue, 12 Aug 2025 15:49:01 +0200 Subject: [PATCH 4/9] ci: Adjust notification condition Only send out notifications if the build failed, a build is re-run and wasn't cancelled. --- .github/workflows/reusable_build_image.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/reusable_build_image.yaml b/.github/workflows/reusable_build_image.yaml index a11cb8c80..5ae205d65 100644 --- a/.github/workflows/reusable_build_image.yaml +++ b/.github/workflows/reusable_build_image.yaml @@ -102,7 +102,7 @@ jobs: name: Failure Notification needs: [generate_matrix, build, publish_manifests] runs-on: ubuntu-latest - if: failure() || github.run_attempt > 1 + if: failure() || (github.run_attempt > 1 && !cancelled()) steps: - name: Send Notification uses: stackabletech/actions/send-slack-notification@55d2f9fcbcd7884ac929ea65fd6f069e7b7a49d2 # v0.8.1 From 75194b270ad5c9231655dab07210280044b4ec28 Mon Sep 17 00:00:00 2001 From: Techassi Date: Wed, 13 Aug 2025 09:03:14 +0200 Subject: [PATCH 5/9] chore: Remove commented-out code --- rust/boil/src/build/bakefile.rs | 47 --------------------------------- 1 file changed, 47 deletions(-) diff --git a/rust/boil/src/build/bakefile.rs b/rust/boil/src/build/bakefile.rs index e56896888..4727dc50d 100644 --- a/rust/boil/src/build/bakefile.rs +++ b/rust/boil/src/build/bakefile.rs @@ -429,50 +429,3 @@ impl BakefileTarget { pub struct BakefileGroup { targets: Vec, } - -// #[derive(Debug, Default)] -// pub struct Graph { -// targets: BTreeMap>, -// } - -// impl 
Graph { -// pub fn all() -> Self { -// let image_config_paths: Vec<_> = glob("./**/boil-config.toml") -// .expect("glob pattern must be valid") -// .filter_map(Result::ok) -// .collect(); - -// let mut targets = Self::default(); - -// for image_config_path in image_config_paths { -// let image_config = ImageConfig::from_file(&image_config_path).unwrap(); - -// let (image_name, _) = image_config_path -// .to_str() -// .unwrap() -// .rsplit_once('/') -// .unwrap(); - -// let pairs = image_config.filter_by_version(None).unwrap(); - -// targets.insert_targets(image_name.to_owned(), pairs); -// } - -// targets -// } - -// fn insert_targets( -// &mut self, -// image_name: String, -// pairs: Vec, -// ) -> Vec { -// let mut nodes = Vec::new(); - -// for VersionOptionsPair { version, options } in pairs { -// let key = format!("{image_name}:{version}"); -// let child_nodes = Vec::new(); - -// // let nodes = self.insert_targets(image_name, pairs); -// } -// } -// } From 6db36f65e60088139c9371601562a4ffd2b64046 Mon Sep 17 00:00:00 2001 From: Techassi Date: Wed, 13 Aug 2025 09:04:54 +0200 Subject: [PATCH 6/9] chore: Remove unwraps --- rust/boil/src/build/bakefile.rs | 9 ++++++--- rust/boil/src/build/docker.rs | 8 ++++++-- rust/boil/src/build/mod.rs | 5 ++++- rust/boil/src/show/images.rs | 7 +++++-- 4 files changed, 21 insertions(+), 8 deletions(-) diff --git a/rust/boil/src/build/bakefile.rs b/rust/boil/src/build/bakefile.rs index 4727dc50d..d903f2dc1 100644 --- a/rust/boil/src/build/bakefile.rs +++ b/rust/boil/src/build/bakefile.rs @@ -7,7 +7,7 @@ use std::{ use glob::glob; use serde::Serialize; -use snafu::{ResultExt, Snafu}; +use snafu::{OptionExt, ResultExt, Snafu}; use time::format_description::well_known::Rfc3339; use url::Host; @@ -35,6 +35,9 @@ pub enum GitError { #[snafu(display("failed to parse HEAD revision"))] ParseHeadRevision { source: git2::Error }, + + #[snafu(display("failed to find starting point of rev range"))] + InvalidRange, } #[derive(Debug, Snafu)] @@ 
-366,7 +369,6 @@ impl Bakefile { /// Formats and return the context name, eg. `stackable/image/stackable-base-1_0_0`. fn format_context_name(name: &str) -> String { format!("local-image/{name}") - // format!("stackable/image/{name}") } /// Formats and returns the context target name, eg. `target:stackable-base-1_0_0`. @@ -384,8 +386,9 @@ impl Bakefile { fn git_head_revision() -> Result { let repo = git2::Repository::open(".").context(OpenRepositorySnafu)?; let rev = repo.revparse("HEAD").context(ParseHeadRevisionSnafu)?; + let rev = rev.from().context(InvalidRangeSnafu)?.id().to_string(); - Ok(rev.from().unwrap().id().to_string()) + Ok(rev) } } diff --git a/rust/boil/src/build/docker.rs b/rust/boil/src/build/docker.rs index 38f10e3cb..70fa77657 100644 --- a/rust/boil/src/build/docker.rs +++ b/rust/boil/src/build/docker.rs @@ -6,10 +6,14 @@ use std::{ }; use serde::{Deserialize, Serialize, de::Visitor, ser::SerializeMap}; -use snafu::{Snafu, ensure}; +use snafu::{OptionExt, Snafu, ensure}; #[derive(Debug, Snafu)] pub enum ParseBuildArgumentError { + #[snafu(display("invalid format, expected ="))] + InvalidFormat, + + #[snafu(display("encountered non ASCII characters"))] NonAscii, } @@ -29,7 +33,7 @@ impl FromStr for BuildArgument { fn from_str(s: &str) -> Result { ensure!(s.is_ascii(), NonAsciiSnafu); - let (key, value) = s.split_once('=').unwrap(); + let (key, value) = s.split_once('=').context(InvalidFormatSnafu)?; let key = key.replace(['-', '/'], "_").to_uppercase(); Ok(Self((key, value.to_owned()))) diff --git a/rust/boil/src/build/mod.rs b/rust/boil/src/build/mod.rs index aa3753f4b..ef5cae5b9 100644 --- a/rust/boil/src/build/mod.rs +++ b/rust/boil/src/build/mod.rs @@ -33,6 +33,9 @@ pub enum Error { #[snafu(display("failed to run child process"))] RunChildProcess { source: std::io::Error }, + #[snafu(display("failed to spawn child process"))] + SpawnChildProcess { source: std::io::Error }, + #[snafu(display("encountered invalid image version, must not include 
any build metadata"))] InvalidImageVersion, } @@ -74,7 +77,7 @@ pub fn run_command(args: BuildArguments, config: Config) -> Result<(), Error> { .arg("-") .stdin(Stdio::piped()) .spawn() - .unwrap(); + .context(SpawnChildProcessSnafu)?; let stdin_handle = child.stdin.take().with_context(|| { child diff --git a/rust/boil/src/show/images.rs b/rust/boil/src/show/images.rs index 3baaa3929..5af7daf16 100644 --- a/rust/boil/src/show/images.rs +++ b/rust/boil/src/show/images.rs @@ -2,17 +2,20 @@ use std::collections::BTreeMap; use snafu::{ResultExt, Snafu}; -use crate::build::bakefile::{Targets, TargetsOptions}; +use crate::build::bakefile::{Targets, TargetsError, TargetsOptions}; #[derive(Debug, Snafu)] pub enum Error { #[snafu(display("failed to serialize list as JSON"))] SerializeList { source: serde_json::Error }, + + #[snafu(display("failed to build list of targets"))] + BuildTargets { source: TargetsError }, } pub fn run_command() -> Result<(), Error> { let list: BTreeMap<_, _> = Targets::all(TargetsOptions { only_entry: true }) - .unwrap() + .context(BuildTargetsSnafu)? .into_iter() .map(|(image_name, image_versions)| { let versions: Vec<_> = image_versions From edaacb9266265d2230d7a3a03110d2f1463dad82 Mon Sep 17 00:00:00 2001 From: Techassi Date: Fri, 15 Aug 2025 11:40:08 +0200 Subject: [PATCH 7/9] chore: Add cargo alias for boil --- .cargo/config.toml | 1 + rust/boil/README.md | 2 ++ 2 files changed, 3 insertions(+) diff --git a/.cargo/config.toml b/.cargo/config.toml index b6cf3ff25..54e07b717 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,2 +1,3 @@ [alias] patchable = ["run", "--bin", "patchable", "--"] +boil = ["run", "--bin", "boil", "--"] diff --git a/rust/boil/README.md b/rust/boil/README.md index e29b38986..a53c2f3af 100644 --- a/rust/boil/README.md +++ b/rust/boil/README.md @@ -8,6 +8,8 @@ boil builds container images in parallel. ## Quick Overview +Either compile and run the binary, or use the `cargo boil` alias. 
+ ```shell # Builds all version of the image located in the 'airflow' folder boil build airflow From 84184ec79fa49a8465174e4184a09184b2717eb4 Mon Sep 17 00:00:00 2001 From: Techassi Date: Fri, 15 Aug 2025 11:42:10 +0200 Subject: [PATCH 8/9] chore: Update README to mention boil --- README.md | 43 +++++++++++++------------------------------ 1 file changed, 13 insertions(+), 30 deletions(-) diff --git a/README.md b/README.md index a5f2602ea..fa313e425 100644 --- a/README.md +++ b/README.md @@ -15,45 +15,28 @@ This repository contains Dockerfiles and scripts to build base images for use wi ## Prerequisites -* [Stackable Image Tools](https://github.com/stackabletech/image-tools) (`pip install image-tools-stackabletech`) +* [boil](./rust/boil//README.md) (`cargo boil`) * Docker including the [`buildx` plugin](https://github.com/docker/buildx) * Optional: [OpenShift preflight tool](https://github.com/redhat-openshift-ecosystem/openshift-preflight) to verify an image for OpenShift -## Build Product Images +## Build Images -Product images are published to the `oci.stackable.tech` registry under the `sdp` organization by default. +Images are published to the `oci.stackable.tech` registry under the `sdp` organization by default. -### Build single products locally +### Build images locally -To build and push product images to the default repository use this command: +Consult the [boil README](./rust/boil//README.md) which contains a broad selection of different commands to build images locally. -```sh -bake --product zookeeper --image 0.0.0-dev --push -``` - -This will build images for Apache ZooKeeper versions as defined in the `conf.py` file, tag them with the `image-version` 0.0.0-dev and push them to the registry. - -You can select a specific version of a product to build using the syntax `product=version` e.g. to build Hive 3.1.3 you can use this command: - -```sh -bake --product hive=3.1.3 -i 0.0.0-dev -``` - -> [!NOTE] -> `-i` is the shorthand for `--image` (i.e. 
the resulting image tag) - -### Build all products locally - -To build all products in all versions locally you can use this command - -```sh -bake --image-version 0.0.0-dev -``` +### Build images via GitHub Actions -### Build everything in GitHub +There are individual GHA workflows (one for each image) which use a +[reusable workflow](.github/workflows/reusable_build_image.yaml) to build all specified versions for +both `amd64` and `arm64`. The workflow is triggered -The GitHub action called `Build (and optionally publish) 0.0.0-dev images` can be triggered manually to do build all images in all versions. -When triggered manually it will _not_ push the images to the registry. +* by pushes to `main` to produce `0.0.0-dev` versions of the images, +* by a regular schedule to rebuild `0.0.0-dev` versions of the images to avoid staleness, +* by tag pushes to produce (release candidate) images for a particular SDP, +* and by manual workflow dispatches. ## Patches From f439e99b1052ddb9408bc4e9dd821d54e53c1ebb Mon Sep 17 00:00:00 2001 From: Techassi Date: Fri, 15 Aug 2025 14:04:46 +0200 Subject: [PATCH 9/9] chore: Update README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fa313e425..efd84522c 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ both `amd64` and `arm64`. The workflow is triggered * by pushes to `main` to produce `0.0.0-dev` versions of the images, * by a regular schedule to rebuild `0.0.0-dev` versions of the images to avoid staleness, -* by tag pushes to produce (release candidate) images for a particular SDP, +* by tag pushes to produce (release candidate) images for a particular SDP release, * and by manual workflow dispatches. ## Patches