diff --git a/CHANGELOG.md b/CHANGELOG.md index a588f688..aeb468ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,11 @@ - BREAKING: `configOverrides` now only accepts the known config file `webserver_config.py`. Previously, arbitrary file names were silently accepted and ignored ([#775]). - Bump `stackable-operator` to 0.110.1, kube to 3.1.0, and snafu to 0.9 ([#775]). - BREAKING: Rename `EXPERIMENTAL_FILE_HEADER` and `EXPERIMENTAL_FILE_FOOTER` in `webserver_config.py` for arbitrary python code to `FILE_HEADER` and `FILE_FOOTER` ([#775], [#777]). +- BREAKING: The `.clusterConfig.credentialsSecret` field has been renamed to `.clusterConfig.credentialsSecretName` for consistency ([#754]). +- BREAKING: Implement generic database connection. + This means you need to replace your simple database connection string with a typed struct. + This struct is consistent between different CRDs, so that you can easily copy/paste it between stacklets. + Read the [Airflow database documentation](https://docs.stackable.tech/home/nightly/airflow/usage-guide/database-connections) for details ([#754]). 
### Fixed @@ -25,6 +30,7 @@ [#774]: https://github.com/stackabletech/airflow-operator/pull/774 [#775]: https://github.com/stackabletech/airflow-operator/pull/775 [#777]: https://github.com/stackabletech/airflow-operator/pull/777 +[#754]: https://github.com/stackabletech/airflow-operator/pull/754 ## [26.3.0] - 2026-03-16 diff --git a/Cargo.lock b/Cargo.lock index e8d63bf9..3c4d4890 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -392,11 +392,12 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.35" +version = "0.2.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" +checksum = "4481a617ad9a412be3b97c5d403fef8ed023103368908b9c50af598ff467cc1e" dependencies = [ "const_format_proc_macros", + "konst", ] [[package]] @@ -1531,6 +1532,21 @@ dependencies = [ "snafu 0.9.0", ] +[[package]] +name = "konst" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "128133ed7824fcd73d6e7b17957c5eb7bacb885649bd8c69708b2331a10bcefb" +dependencies = [ + "konst_macro_rules", +] + +[[package]] +name = "konst_macro_rules" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4933f3f57a8e9d9da04db23fb153356ecaf00cbd14aee46279c33dc80925c37" + [[package]] name = "kube" version = "3.1.0" @@ -1773,7 +1789,7 @@ dependencies = [ "num-integer", "num-iter", "num-traits", - "rand 0.8.5", + "rand 0.8.6", "smallvec", "zeroize", ] @@ -2105,9 +2121,9 @@ checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "portable-atomic-util" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "091397be61a01d4be58e7841595bd4bfedb15f1cd54977d79b8271e94ed799a3" +checksum = "c2a106d1259c23fac8e543272398ae0e3c0b8d33c88ed73d0cc71b0f1d902618" dependencies = [ 
"portable-atomic", ] @@ -2219,9 +2235,9 @@ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "5ca0ecfa931c29007047d1bc58e623ab12e5590e8c7cc53200d5202b69266d8a" dependencies = [ "rand_chacha 0.3.1", "rand_core 0.6.4", @@ -3114,6 +3130,12 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "symlink" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a" + [[package]] name = "syn" version = "1.0.109" @@ -3269,9 +3291,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.52.0" +version = "1.52.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a91135f59b1cbf38c91e73cf3386fca9bb77915c45ce2771460c9d92f0f3d776" +checksum = "b67dee974fe86fd92cc45b7a95fdd2f99a36a6d7b0d431a231178d3d670bbcc6" dependencies = [ "bytes", "libc", @@ -3464,11 +3486,12 @@ dependencies = [ [[package]] name = "tracing-appender" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" +checksum = "050686193eb999b4bb3bc2acfa891a13da00f79734704c4b8b4ef1a10b368a3c" dependencies = [ "crossbeam-channel", + "symlink", "thiserror 2.0.18", "time", "tracing-subscriber", @@ -3561,9 +3584,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" +checksum = "40ce102ab67701b8526c123c1bab5cbe42d7040ccfd0f64af1a385808d2f43de" [[package]] name = "ucd-trie" @@ -3661,9 +3684,9 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasip2" -version = "1.0.2+wasi-0.2.9" +version = "1.0.3+wasi-0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +checksum = "20064672db26d7cdc89c7798c48a0fdfac8213434a1186e5ef29fd560ae223d6" dependencies = [ "wit-bindgen", ] @@ -3895,9 +3918,9 @@ dependencies = [ [[package]] name = "wit-bindgen" -version = "0.51.0" +version = "0.57.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +checksum = "1ebf944e87a7c253233ad6766e082e3cd714b5d03812acc24c318f549614536e" [[package]] name = "writeable" diff --git a/Cargo.nix b/Cargo.nix index ceb2dfb4..939286fb 100644 --- a/Cargo.nix +++ b/Cargo.nix @@ -1226,9 +1226,9 @@ rec { }; "const_format" = rec { crateName = "const_format"; - version = "0.2.35"; + version = "0.2.36"; edition = "2021"; - sha256 = "1b9h03z3k76ail1ldqxcqmsc4raa7dwgwwqwrjf6wmism5lp9akz"; + sha256 = "07ncczs8yndga2f8p4386c827l4fxwzl0pbwp7ijnhcsmlbsd0a4"; authors = [ "rodrimati1992 " ]; @@ -1237,6 +1237,12 @@ rec { name = "const_format_proc_macros"; packageId = "const_format_proc_macros"; } + { + name = "konst"; + packageId = "konst"; + usesDefaultFeatures = false; + features = [ "rust_1_64" ]; + } ]; features = { "__debug" = [ "const_format_proc_macros/debug" ]; @@ -1250,10 +1256,9 @@ rec { "constant_time_as_str" = [ "fmt" ]; "derive" = [ "fmt" "const_format_proc_macros/derive" ]; "fmt" = [ "rust_1_83" ]; - "konst" = [ "dep:konst" ]; "more_str_macros" = [ "rust_1_64" ]; "nightly_const_generics" = [ "const_generics" ]; - "rust_1_64" = [ "rust_1_51" "konst" "konst/rust_1_64" ]; + 
"rust_1_64" = [ "rust_1_51" ]; "rust_1_83" = [ "rust_1_64" ]; }; resolvedDefaultFeatures = [ "default" ]; @@ -4863,7 +4868,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "k8s_version"; authors = [ @@ -4890,6 +4895,53 @@ rec { }; resolvedDefaultFeatures = [ "darling" ]; }; + "konst" = rec { + crateName = "konst"; + version = "0.2.20"; + edition = "2018"; + sha256 = "1yyf1fhk28wbf1lqrga9as4cpfmpbry9a5vvdqyxgz14g3nk708j"; + authors = [ + "rodrimati1992 " + ]; + dependencies = [ + { + name = "konst_macro_rules"; + packageId = "konst_macro_rules"; + } + ]; + features = { + "__ui" = [ "__test" "trybuild" "rust_latest_stable" ]; + "const_generics" = [ "rust_1_51" ]; + "constant_time_slice" = [ "rust_latest_stable" ]; + "default" = [ "cmp" "parsing" ]; + "deref_raw_in_fn" = [ "rust_1_56" ]; + "konst_proc_macros" = [ "dep:konst_proc_macros" ]; + "mut_refs" = [ "rust_latest_stable" "konst_macro_rules/mut_refs" ]; + "nightly_mut_refs" = [ "mut_refs" "konst_macro_rules/nightly_mut_refs" ]; + "parsing" = [ "parsing_no_proc" "konst_proc_macros" ]; + "rust_1_51" = [ "konst_macro_rules/rust_1_51" ]; + "rust_1_55" = [ "rust_1_51" "konst_macro_rules/rust_1_55" ]; + "rust_1_56" = [ "rust_1_55" "konst_macro_rules/rust_1_56" ]; + "rust_1_57" = [ "rust_1_56" "konst_macro_rules/rust_1_57" ]; + "rust_1_61" = [ "rust_1_57" "konst_macro_rules/rust_1_61" ]; + "rust_1_64" = [ "rust_1_61" ]; + "rust_latest_stable" = [ "rust_1_64" ]; + "trybuild" = [ "dep:trybuild" ]; + }; + resolvedDefaultFeatures = [ "rust_1_51" "rust_1_55" "rust_1_56" "rust_1_57" "rust_1_61" "rust_1_64" ]; + }; + "konst_macro_rules" = rec { + crateName = "konst_macro_rules"; + version = "0.2.19"; + edition = "2018"; + sha256 = "0dswja0dqcww4x3fwjnirc0azv2n6cazn8yv0kddksd8awzkz4x4"; + 
authors = [ + "rodrimati1992 " + ]; + features = { + }; + resolvedDefaultFeatures = [ "rust_1_51" "rust_1_55" "rust_1_56" "rust_1_57" "rust_1_61" ]; + }; "kube" = rec { crateName = "kube"; version = "3.1.0"; @@ -5843,7 +5895,7 @@ rec { } { name = "rand"; - packageId = "rand 0.8.5"; + packageId = "rand 0.8.6"; optional = true; usesDefaultFeatures = false; } @@ -5862,7 +5914,7 @@ rec { devDependencies = [ { name = "rand"; - packageId = "rand 0.8.5"; + packageId = "rand 0.8.6"; features = [ "small_rng" ]; } ]; @@ -6969,9 +7021,9 @@ rec { }; "portable-atomic-util" = rec { crateName = "portable-atomic-util"; - version = "0.2.6"; + version = "0.2.7"; edition = "2018"; - sha256 = "18wrsx7fjwc2kgbpfjfm3igv3vdzsidmjhbqivjln7d0c6z9f4q9"; + sha256 = "0616j0fhy6y71hyxg3n86f6hng0fmsc269s3wp4gl8ww4p8hd8f2"; libName = "portable_atomic_util"; dependencies = [ { @@ -6982,6 +7034,7 @@ rec { } ]; features = { + "serde" = [ "dep:serde" ]; "std" = [ "alloc" ]; }; resolvedDefaultFeatures = [ "alloc" ]; @@ -7265,11 +7318,11 @@ rec { "rustc-dep-of-std" = [ "core" ]; }; }; - "rand 0.8.5" = rec { + "rand 0.8.6" = rec { crateName = "rand"; - version = "0.8.5"; + version = "0.8.6"; edition = "2018"; - sha256 = "013l6931nn7gkc23jz5mm3qdhf93jjf0fg64nz2lp4i51qd8vbrl"; + sha256 = "12kd4rljn86m00rcaz4c1rcya4mb4gk5ig6i8xq00a8wjgxfr82w"; authors = [ "The Rand Project Developers" "The Rust Project Developers" @@ -7291,12 +7344,9 @@ rec { "default" = [ "std" "std_rng" ]; "getrandom" = [ "rand_core/getrandom" ]; "libc" = [ "dep:libc" ]; - "log" = [ "dep:log" ]; - "packed_simd" = [ "dep:packed_simd" ]; "rand_chacha" = [ "dep:rand_chacha" ]; "serde" = [ "dep:serde" ]; "serde1" = [ "serde" "rand_core/serde1" ]; - "simd_support" = [ "packed_simd" ]; "std" = [ "rand_core/std" "rand_chacha/std" "alloc" "getrandom" "libc" ]; "std_rng" = [ "rand_chacha" ]; }; @@ -9593,7 +9643,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = 
"96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_certs"; authors = [ @@ -9696,7 +9746,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_operator"; authors = [ @@ -9876,7 +9926,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; procMacro = true; libName = "stackable_operator_derive"; @@ -9911,7 +9961,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_shared"; authors = [ @@ -9992,7 +10042,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_telemetry"; authors = [ @@ -10102,7 +10152,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_versioned"; authors = [ @@ -10146,7 +10196,7 @@ rec { src = pkgs.fetchgit { url = 
"https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; procMacro = true; libName = "stackable_versioned_macros"; @@ -10214,7 +10264,7 @@ rec { src = pkgs.fetchgit { url = "https://github.com/stackabletech/operator-rs.git"; rev = "96f42571ea185a3cd76fedde351fcabbeefcae16"; - sha256 = "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z"; + sha256 = "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by"; }; libName = "stackable_webhook"; authors = [ @@ -10416,6 +10466,16 @@ rec { }; resolvedDefaultFeatures = [ "i128" ]; }; + "symlink" = rec { + crateName = "symlink"; + version = "0.1.0"; + edition = "2015"; + sha256 = "02h1i0b81mxb4vns4xrvrfibpcvs7jqqav8p3yilwik8cv73r5x7"; + authors = [ + "Chris Morgan " + ]; + + }; "syn 1.0.109" = rec { crateName = "syn"; version = "1.0.109"; @@ -10848,9 +10908,9 @@ rec { }; "tokio" = rec { crateName = "tokio"; - version = "1.52.0"; + version = "1.52.1"; edition = "2021"; - sha256 = "0xnpygq9578c8rqjgkj5bj8pgfx9zj337kvk3v4kigqwkgska4d9"; + sha256 = "1imw1dkkv38p66i33m5hsyk3d6prsbyrayjvqhndjvz89ybywzdn"; authors = [ "Tokio Contributors " ]; @@ -11689,9 +11749,9 @@ rec { }; "tracing-appender" = rec { crateName = "tracing-appender"; - version = "0.2.4"; + version = "0.2.5"; edition = "2018"; - sha256 = "1bxf7xvsr89glbq174cx0b9pinaacbhlmc85y1ssniv2rq5lhvbq"; + sha256 = "0g4a6q5s3wafid5lqw1ljzvh1nhk3a4zmb627fxv96dr7qcqc1h5"; libName = "tracing_appender"; authors = [ "Zeki Sherif " @@ -11702,6 +11762,10 @@ rec { name = "crossbeam-channel"; packageId = "crossbeam-channel"; } + { + name = "symlink"; + packageId = "symlink"; + } { name = "thiserror"; packageId = "thiserror 2.0.18"; @@ -12056,9 +12120,9 @@ rec { }; "typenum" = rec { crateName = "typenum"; - version = "1.19.0"; + version = "1.20.0"; edition = "2018"; - sha256 = 
"1fw2mpbn2vmqan56j1b3fbpcdg80mz26fm53fs16bq5xcq84hban"; + sha256 = "1pj35y6q11d3y55gdl6g1h2dfhmybjming0jdi9bh0bpnqm11kj0"; authors = [ "Paho Lurie-Gregg " "Andre Bogus " @@ -12290,9 +12354,9 @@ rec { }; "wasip2" = rec { crateName = "wasip2"; - version = "1.0.2+wasi-0.2.9"; + version = "1.0.3+wasi-0.2.9"; edition = "2021"; - sha256 = "1xdw7v08jpfjdg94sp4lbdgzwa587m5ifpz6fpdnkh02kwizj5wm"; + sha256 = "1mi3w855dz99xzjqc4aa8c9q5b6z1y5c963pkk4cvmr6vdr4c1i0"; dependencies = [ { name = "wit-bindgen"; @@ -13792,19 +13856,20 @@ rec { }; "wit-bindgen" = rec { crateName = "wit-bindgen"; - version = "0.51.0"; + version = "0.57.1"; edition = "2024"; - sha256 = "19fazgch8sq5cvjv3ynhhfh5d5x08jq2pkw8jfb05vbcyqcr496p"; + sha256 = "0vjk2jb593ri9k1aq4iqs2si9mrw5q46wxnn78im7hm7hx799gqy"; libName = "wit_bindgen"; authors = [ "Alex Crichton " ]; features = { - "async" = [ "std" "wit-bindgen-rust-macro?/async" ]; - "async-spawn" = [ "async" "dep:futures" ]; + "async-spawn" = [ "async" "dep:futures" "std" ]; "bitflags" = [ "dep:bitflags" ]; - "default" = [ "macros" "realloc" "async" "std" "bitflags" ]; + "default" = [ "macros" "realloc" "async" "std" "bitflags" "macro-string" ]; + "futures-stream" = [ "async" "dep:futures" ]; "inter-task-wakeup" = [ "async" ]; + "macro-string" = [ "wit-bindgen-rust-macro?/macro-string" ]; "macros" = [ "dep:wit-bindgen-rust-macro" ]; "rustc-dep-of-std" = [ "dep:core" "dep:alloc" ]; }; diff --git a/Cargo.toml b/Cargo.toml index 07505e63..a7feba3e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,5 @@ tokio = { version = "1.40", features = ["full"] } tracing = "0.1" [patch."https://github.com/stackabletech/operator-rs.git"] -# TODO revert this before merging! 
# stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "main" } # stackable-operator = { path = "../operator-rs/crates/stackable-operator" } diff --git a/crate-hashes.json b/crate-hashes.json index 7a259ca2..e19b553d 100644 --- a/crate-hashes.json +++ b/crate-hashes.json @@ -1,12 +1,12 @@ { - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#k8s-version@0.1.3": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-certs@0.4.0": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-operator-derive@0.3.1": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-operator@0.110.1": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-shared@0.1.0": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-telemetry@0.6.3": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-versioned-macros@0.9.0": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-versioned@0.9.0": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", - "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-webhook@0.9.1": "14n9a3j3l50iixybfzg4x8gmfxv4idkyvfxmb48d552isjqcg83z", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#k8s-version@0.1.3": 
"0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-certs@0.4.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-operator-derive@0.3.1": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-operator@0.110.1": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-shared@0.1.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-telemetry@0.6.3": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-versioned-macros@0.9.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-versioned@0.9.0": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", + "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.110.1#stackable-webhook@0.9.1": "0d58yvxvy8hbai12bjhcyvh4zw182j5dsfyqja4k2xc1vzjy29by", "git+https://github.com/stackabletech/product-config.git?tag=0.8.0#product-config@0.8.0": "1dz70kapm2wdqcr7ndyjji0lhsl98bsq95gnb2lw487wf6yr7987" } \ No newline at end of file diff --git a/deploy/config-spec/properties.yaml b/deploy/config-spec/properties.yaml index 4404b88e..9bd8c3b2 100644 --- a/deploy/config-spec/properties.yaml +++ b/deploy/config-spec/properties.yaml @@ -1,16 +1,5 @@ +--- version: 0.1.0 spec: units: [] -properties: - - property: &credentialsSecret - propertyNames: - - name: "credentialsSecret" - kind: - type: "env" - datatype: - type: "string" 
- roles: - - name: "node" - required: true - asOfVersion: "0.0.0" - description: "The secret where the Airflow credentials are stored." +properties: [] diff --git a/deploy/helm/airflow-operator/configs/properties.yaml b/deploy/helm/airflow-operator/configs/properties.yaml index 4404b88e..9bd8c3b2 100644 --- a/deploy/helm/airflow-operator/configs/properties.yaml +++ b/deploy/helm/airflow-operator/configs/properties.yaml @@ -1,16 +1,5 @@ +--- version: 0.1.0 spec: units: [] -properties: - - property: &credentialsSecret - propertyNames: - - name: "credentialsSecret" - kind: - type: "env" - datatype: - type: "string" - roles: - - name: "node" - required: true - asOfVersion: "0.0.0" - description: "The secret where the Airflow credentials are stored." +properties: [] diff --git a/docs/modules/airflow/examples/example-airflow-dags-configmap.yaml b/docs/modules/airflow/examples/example-airflow-dags-configmap.yaml index afb8f558..c63da5c5 100644 --- a/docs/modules/airflow/examples/example-airflow-dags-configmap.yaml +++ b/docs/modules/airflow/examples/example-airflow-dags-configmap.yaml @@ -7,9 +7,12 @@ spec: image: productVersion: 3.1.6 clusterConfig: - loadExamples: false - exposeConfig: false - credentialsSecret: simple-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials volumes: - name: cm-dag # <3> configMap: @@ -27,14 +30,4 @@ spec: AIRFLOW__CORE__DAGS_FOLDER: "/dags" # <8> replicas: 1 celeryExecutors: - roleGroups: - default: - envOverrides: - AIRFLOW__CORE__DAGS_FOLDER: "/dags" # <8> - replicas: 2 - schedulers: - roleGroups: - default: - envOverrides: - AIRFLOW__CORE__DAGS_FOLDER: "/dags" # <8> - replicas: 1 + # ... 
diff --git a/docs/modules/airflow/examples/example-airflow-gitsync-https.yaml b/docs/modules/airflow/examples/example-airflow-gitsync-https.yaml index 63db20cb..965c061d 100644 --- a/docs/modules/airflow/examples/example-airflow-gitsync-https.yaml +++ b/docs/modules/airflow/examples/example-airflow-gitsync-https.yaml @@ -9,7 +9,7 @@ spec: clusterConfig: loadExamples: false exposeConfig: false - credentialsSecret: test-airflow-credentials # <1> + credentialsSecretName: airflow-admin-credentials # <1> dagsGitSync: # <2> - repo: https://github.com/stackabletech/airflow-operator # <3> branch: "main" # <4> @@ -17,7 +17,7 @@ spec: depth: 10 # <6> wait: 20s # <7> credentials: - basicAuthSecretName: git-credentials # <8> + basicAuthSecretName: airflow-git-credentials # <8> gitSyncConf: # <9> --rev: HEAD # <10> # --rev: git-sync-tag # N.B. tag must be covered by "depth" (the number of commits to clone) @@ -33,7 +33,7 @@ spec: apiVersion: v1 kind: Secret metadata: - name: git-credentials # <8> + name: airflow-git-credentials # <8> type: Opaque data: user: c3Rh... 
diff --git a/docs/modules/airflow/examples/example-airflow-incluster.yaml b/docs/modules/airflow/examples/example-airflow-incluster.yaml index ca437df9..2ae9327a 100644 --- a/docs/modules/airflow/examples/example-airflow-incluster.yaml +++ b/docs/modules/airflow/examples/example-airflow-incluster.yaml @@ -9,28 +9,39 @@ spec: clusterConfig: loadExamples: false exposeConfig: false - credentialsSecret: simple-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable roleGroups: default: - envOverrides: + envOverrides: &envOverrides AIRFLOW_CONN_KUBERNETES_IN_CLUSTER: "kubernetes://?__extra__=%7B%22extra__kubernetes__in_cluster%22%3A+true%2C+%22extra__kubernetes__kube_config%22%3A+%22%22%2C+%22extra__kubernetes__kube_config_path%22%3A+%22%22%2C+%22extra__kubernetes__namespace%22%3A+%22%22%7D" replicas: 1 schedulers: roleGroups: default: - envOverrides: - AIRFLOW_CONN_KUBERNETES_IN_CLUSTER: "kubernetes://?__extra__=%7B%22extra__kubernetes__in_cluster%22%3A+true%2C+%22extra__kubernetes__kube_config%22%3A+%22%22%2C+%22extra__kubernetes__kube_config_path%22%3A+%22%22%2C+%22extra__kubernetes__namespace%22%3A+%22%22%7D" + envOverrides: *envOverrides replicas: 1 celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials roleGroups: default: - envOverrides: - AIRFLOW_CONN_KUBERNETES_IN_CLUSTER: "kubernetes://?__extra__=%7B%22extra__kubernetes__in_cluster%22%3A+true%2C+%22extra__kubernetes__kube_config%22%3A+%22%22%2C+%22extra__kubernetes__kube_config_path%22%3A+%22%22%2C+%22extra__kubernetes__namespace%22%3A+%22%22%7D" + envOverrides: *envOverrides replicas: 1 # in case of using 
kubernetesExecutors # kubernetesExecutors: -# envOverrides: -# AIRFLOW_CONN_KUBERNETES_IN_CLUSTER: "kubernetes://?__extra__=%7B%22extra__kubernetes__in_cluster%22%3A+true%2C+%22extra__kubernetes__kube_config%22%3A+%22%22%2C+%22extra__kubernetes__kube_config_path%22%3A+%22%22%2C+%22extra__kubernetes__namespace%22%3A+%22%22%7D" +# envOverrides: *envOverrides diff --git a/docs/modules/airflow/examples/example-airflow-secret.yaml b/docs/modules/airflow/examples/example-airflow-secret.yaml deleted file mode 100644 index 5e112e91..00000000 --- a/docs/modules/airflow/examples/example-airflow-secret.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -apiVersion: v1 -kind: Secret -metadata: - name: simple-airflow-credentials -type: Opaque -stringData: - adminUser.username: airflow - adminUser.firstname: Airflow - adminUser.lastname: Admin - adminUser.email: airflow@airflow.com - adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - # Only needed when using celery workers (instead of Kubernetes executors) - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 diff --git a/docs/modules/airflow/examples/getting_started/code/airflow-credentials.yaml b/docs/modules/airflow/examples/getting_started/code/airflow-credentials.yaml index 5e112e91..aed50808 100644 --- a/docs/modules/airflow/examples/getting_started/code/airflow-credentials.yaml +++ b/docs/modules/airflow/examples/getting_started/code/airflow-credentials.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: Secret metadata: - name: simple-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -10,7 +10,21 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: 
postgresql+psycopg2://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - # Only needed when using celery workers (instead of Kubernetes executors) - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +# Only needed when using celery workers (instead of Kubernetes executors) +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +# Only needed when using celery workers (instead of Kubernetes executors) +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis diff --git a/docs/modules/airflow/examples/getting_started/code/airflow.yaml b/docs/modules/airflow/examples/getting_started/code/airflow.yaml index a2c1b646..0c95fb3b 100644 --- a/docs/modules/airflow/examples/getting_started/code/airflow.yaml +++ b/docs/modules/airflow/examples/getting_started/code/airflow.yaml @@ -10,7 +10,12 @@ spec: clusterConfig: loadExamples: true exposeConfig: false - credentialsSecret: simple-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable @@ -18,6 +23,15 @@ spec: default: replicas: 1 celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials roleGroups: default: replicas: 1 diff --git a/docs/modules/airflow/pages/getting_started/first_steps.adoc b/docs/modules/airflow/pages/getting_started/first_steps.adoc index f25fcd2c..f1eb3290 100644 --- 
a/docs/modules/airflow/pages/getting_started/first_steps.adoc +++ b/docs/modules/airflow/pages/getting_started/first_steps.adoc @@ -10,9 +10,10 @@ With the external dependencies required by Airflow (Postgresql and Redis) instal Supported versions for PostgreSQL and Redis can be found in the https://airflow.apache.org/docs/apache-airflow/stable/installation/prerequisites.html#prerequisites[Airflow documentation]. -== Secret with Airflow credentials +== Airflow secrets -Create a Secret with the necessary credentials, this entails database connection credentials as well as an admin account for Airflow itself. +Secrets are required for the mandatory metadata database connection and the Airflow admin user. +When using celery executors it's also required to provide information for the celery database and broker. Create a file called `airflow-credentials.yaml`: [source,yaml] ---- And apply it: [source,bash] include::example$getting_started/code/getting_started.sh[tag=apply-airflow-credentials] -`connections.sqlalchemyDatabaseUri` must contain the connection string to the SQL database storing the Airflow metadata. +`airflow-postgresql-credentials` contains credentials for the SQL database storing the Airflow metadata. +In this example we will use the same database for both the Airflow job metadata as well as the Celery broker metadata. -`connections.celeryResultBackend` must contain the connection string to the SQL database storing the job metadata (the example above uses the same PostgreSQL database for both). +`airflow-redis-credentials` contains credentials for the Redis instance used for queuing the jobs submitted to the Airflow executor(s). -`connections.celeryBrokerUrl` must contain the connection string to the Redis instance used for queuing the jobs submitted to the airflow executor(s). +`airflow-admin-credentials`: the `adminUser.*` fields are used to create the initial admin user. -The `adminUser` fields are used to create an admin user.
- -NOTE: The admin user is disabled if you use a non-default authentication mechanism like LDAP. +NOTE: The admin user is disabled if you use a non-default authentication mechanism like LDAP or OIDC. == Airflow @@ -60,21 +60,27 @@ include::example$getting_started/code/getting_started.sh[tag=install-airflow] Where: -* `metadata.name` contains the name of the Airflow cluster. +* `metadata.name`: contains the name of the Airflow cluster. +* `spec.clusterConfig.metadataDatabase`: specifies one of the supported database types (in this case, `postgresql`) along with references to the host, database and the secret containing the connection credentials. * the product version of the Docker image provided by Stackable must be set in `spec.image.productVersion`. * `spec.celeryExecutors`: deploy executors managed by Airflow's Celery engine. Alternatively you can use `kuberenetesExectors` that use Airflow's Kubernetes engine for executor management. For more information see https://airflow.apache.org/docs/apache-airflow/stable/executor/index.html#executor-types). -* the `spec.clusterConfig.loadExamples` key is optional and defaults to `false`. +* `spec.celeryExecutors.resultBackend`: specifies one of the supported database types (in this case, `postgresql`) along with references to the host, database and the secret containing the connection credentials. +* `spec.celeryExecutors.broker`: specifies one of the supported queue/broker types (in this case, `redis`) along with references to the host and the secret containing the connection credentials. +* `spec.clusterConfig.loadExamples`: this key is optional and defaults to `false`. It is set to `true` here as the example DAGs are used when verifying the installation. -* the `spec.clusterConfig.exposeConfig` key is optional and defaults to `false`. It is set to `true` only as an aid to verify the configuration and should never be used as such in anything other than test or demo clusters. 
-* the previously created secret must be referenced in `spec.clusterConfig.credentialsSecret`. +* `spec.clusterConfig.exposeConfig`: this key is optional and defaults to `false`. +It is set to `true` only as an aid to verify the configuration and should never be used as such in anything other than test or demo clusters. +* `spec.clusterConfig.credentialsSecretName`: specifies the secret containing the Airflow admin user information. NOTE: The version you need to specify for `spec.image.productVersion` is the desired version of Apache Airflow. You can optionally specify the `spec.image.stackableVersion` to a certain release like `23.11.0` but it is recommended to leave it out and use the default provided by the operator. Check our https://oci.stackable.tech/[image registry,window=_blank] for a list of available versions. Information on how to browse the registry can be found xref:contributor:project-overview.adoc#docker-images[here,window=_blank]. It should generally be safe to simply use the latest version that is available. +NOTE: Refer to xref:usage-guide/database-connections.adoc[] for more information about database/broker connections. + This creates the actual Airflow cluster. After a while, all the Pods in the StatefulSets should be ready: diff --git a/docs/modules/airflow/pages/usage-guide/database-connections.adoc b/docs/modules/airflow/pages/usage-guide/database-connections.adoc new file mode 100644 index 00000000..80d15cc8 --- /dev/null +++ b/docs/modules/airflow/pages/usage-guide/database-connections.adoc @@ -0,0 +1,70 @@ += Database connections +:description: Configure Airflow Database connectivity. + +Airflow requires a metadata database for storing e.g. DAG, task and job data. +The actual connection string is calculated by the operator so that the user does not need to remember the exact structure. 
+ +== Typed connections + +[source,yaml] +---- +spec: + clusterConfig: + metadataDatabase: + postgresql: # <1> + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials # <2> +---- +<1> A reference to one of the supported database backends (e.g. `postgresql`). +<2> A reference to a Secret which must contain the two fields `username` and `password`. + +The queue/broker metadata and URL are only needed when running the celery executor. +The `resultBackend` definition uses the same structure as `metadataDatabase` shown above. +The `broker` definition requires Redis connection details. + +[source,yaml] +---- +spec: + celeryExecutors: + resultBackend: + postgresql: # <1> + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials # <2> + broker: + redis: # <3> + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials # <2> +---- +<1> A reference to one of the supported database backends (e.g. `postgresql`). +<2> A reference to a Secret which must contain the two fields `username` and `password`. +<3> A reference to one of the supported queue brokers (e.g. `redis`). + +== Generic connections + +Alternatively, these connections can also be defined in full in a referenced secret: + +[source,yaml] +---- +spec: + clusterConfig: + metadataDatabase: + generic: + connectionUrlSecretName: postgresql-metadata # <1> +---- + +[source,yaml] +---- +spec: + resultBackend: + generic: + connectionUrlSecretName: postgresql-celery # <2> + broker: + generic: + connectionUrlSecretName: redis-celery # <3> +---- + +<1> A reference to a Secret which must contain the single field `connectionUrl`, e.g. `postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow` +<2> A reference to a Secret which must contain the single field `connectionUrl`, e.g. 
`db+postgresql://airflow:airflow@airflow-postgresql/airflow` +<3> A reference to a Secret which must contain the single field `connectionUrl`, e.g. `redis://:redis@airflow-redis-master:6379/0` diff --git a/docs/modules/airflow/pages/usage-guide/logging.adoc b/docs/modules/airflow/pages/usage-guide/logging.adoc index bd82ee39..6b682690 100644 --- a/docs/modules/airflow/pages/usage-guide/logging.adoc +++ b/docs/modules/airflow/pages/usage-guide/logging.adoc @@ -23,6 +23,7 @@ spec: "flask_appbuilder": level: WARN celeryExecutors: + ... config: logging: enableVectorAgent: true diff --git a/docs/modules/airflow/pages/usage-guide/storage-resources.adoc b/docs/modules/airflow/pages/usage-guide/storage-resources.adoc index 3c399557..69b7772c 100644 --- a/docs/modules/airflow/pages/usage-guide/storage-resources.adoc +++ b/docs/modules/airflow/pages/usage-guide/storage-resources.adoc @@ -27,6 +27,7 @@ spec: default: replicas: 2 celeryExecutors: + ... config: resources: cpu: diff --git a/docs/modules/airflow/pages/usage-guide/using-kubernetes-executors.adoc b/docs/modules/airflow/pages/usage-guide/using-kubernetes-executors.adoc index cdfa0ae5..499872d9 100644 --- a/docs/modules/airflow/pages/usage-guide/using-kubernetes-executors.adoc +++ b/docs/modules/airflow/pages/usage-guide/using-kubernetes-executors.adoc @@ -12,6 +12,7 @@ E.g. you would change the following example ---- spec: celeryExecutors: + ... 
roleGroups: default: replicas: 2 diff --git a/docs/modules/airflow/partials/nav.adoc b/docs/modules/airflow/partials/nav.adoc index 381bdb96..e84b005f 100644 --- a/docs/modules/airflow/partials/nav.adoc +++ b/docs/modules/airflow/partials/nav.adoc @@ -4,6 +4,7 @@ * xref:airflow:required-external-components.adoc[] * xref:airflow:usage-guide/index.adoc[] ** xref:airflow:usage-guide/db-init.adoc[] +** xref:airflow:usage-guide/database-connections.adoc[] ** xref:airflow:usage-guide/mounting-dags.adoc[] ** xref:airflow:usage-guide/applying-custom-resources.adoc[] ** xref:airflow:usage-guide/listenerclass.adoc[] diff --git a/examples/simple-airflow-cluster-dags-cmap.yaml b/examples/simple-airflow-cluster-dags-cmap.yaml deleted file mode 100644 index d9f38242..00000000 --- a/examples/simple-airflow-cluster-dags-cmap.yaml +++ /dev/null @@ -1,116 +0,0 @@ ---- -apiVersion: v1 -kind: Secret -metadata: - name: simple-airflow-credentials -type: Opaque -stringData: - adminUser.username: airflow - adminUser.firstname: Airflow - adminUser.lastname: Admin - adminUser.email: airflow@airflow.com - adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - # Only needed when using celery workers (instead of Kubernetes executors) - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: cm-dag -data: - test_airflow_dag.py: | - from datetime import datetime, timedelta - from airflow import DAG - from airflow.operators.bash import BashOperator - from airflow.operators.dummy import DummyOperator - - with DAG( - dag_id='test_airflow_dag', - schedule='0 0 * * *', - start_date=datetime(2021, 1, 1), - catchup=False, - dagrun_timeout=timedelta(minutes=60), - tags=['example', 'example2'], - 
params={"example_key": "example_value"}, - ) as dag: - run_this_last = DummyOperator( - task_id='run_this_last', - ) - - # [START howto_operator_bash] - run_this = BashOperator( - task_id='run_after_loop', - bash_command='echo 1', - ) - # [END howto_operator_bash] - - run_this >> run_this_last - - for i in range(3): - task = BashOperator( - task_id='runme_' + str(i), - bash_command='echo "{{ task_instance_key_str }}" && sleep 1', - ) - task >> run_this - - # [START howto_operator_bash_template] - also_run_this = BashOperator( - task_id='also_run_this', - bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"', - ) - # [END howto_operator_bash_template] - also_run_this >> run_this_last - - # [START howto_operator_bash_skip] - this_will_skip = BashOperator( - task_id='this_will_skip', - bash_command='echo "hello world"; exit 99;', - dag=dag, - ) - # [END howto_operator_bash_skip] - this_will_skip >> run_this_last - - if __name__ == "__main__": - dag.cli() ---- -apiVersion: airflow.stackable.tech/v1alpha1 -kind: AirflowCluster -metadata: - name: airflow-dags-cmap -spec: - image: - productVersion: 3.1.6 - clusterConfig: - loadExamples: false - exposeConfig: false - credentialsSecret: simple-airflow-credentials - volumes: - - name: cm-dag - configMap: - name: cm-dag - volumeMounts: - - name: cm-dag - mountPath: /dags/test_airflow_dag.py - subPath: test_airflow_dag.py - webservers: - roleConfig: - listenerClass: external-unstable - roleGroups: - default: - envOverrides: - AIRFLOW__CORE__DAGS_FOLDER: "/dags" - replicas: 1 - celeryExecutors: - roleGroups: - default: - envOverrides: - AIRFLOW__CORE__DAGS_FOLDER: "/dags" - replicas: 2 - schedulers: - roleGroups: - default: - envOverrides: - AIRFLOW__CORE__DAGS_FOLDER: "/dags" - replicas: 1 diff --git a/examples/simple-airflow-cluster-ldap-insecure-tls.yaml b/examples/simple-airflow-cluster-ldap-insecure-tls.yaml deleted file mode 100644 index d8a96ee2..00000000 --- 
a/examples/simple-airflow-cluster-ldap-insecure-tls.yaml +++ /dev/null @@ -1,181 +0,0 @@ -# helm install secret-operator oci://oci.stackable.tech/sdp-charts/secret-operator -# helm install commons-operator oci://oci.stackable.tech/sdp-charts/commons-operator -# helm install listener-operator oci://oci.stackable.tech/sdp-charts/listener-operator -# helm install airflow-operator oci://oci.stackable.tech/sdp-charts/airflow-operator -# helm install --repo https://charts.bitnami.com/bitnami --version 12.1.5 --set auth.username=airflow --set auth.password=airflow --set auth.database=airflow --set image.repository=bitnamilegacy/postgresql --set volumePermissions.image.repository=bitnamilegacy/os-shell --set metrics.image.repository=bitnamilegacy/postgres-exporter --set global.security.allowInsecureImages=true airflow-postgresql postgresql -# helm install --repo https://charts.bitnami.com/bitnami --version 17.3.7 --set auth.password=redis --set replica.replicaCount=1 --set global.security.allowInsecureImages=true --set image.repository=bitnamilegacy/redis --set sentinel.image.repository=bitnamilegacy/redis-sentinel --set metrics.image.repository=bitnamilegacy/redis-exporter --set volumePermissions.image.repository=bitnamilegacy/os-shell --set kubectl.image.repository=bitnamilegacy/kubectl --set sysctl.image.repository=bitnamilegacy/os-shell airflow-redis redis -# Log in with user01/user01 or user02/user02 ---- -apiVersion: secrets.stackable.tech/v1alpha1 -kind: SecretClass -metadata: - name: openldap-tls -spec: - backend: - autoTls: - ca: - autoGenerate: true - secret: - name: openldap-tls-ca - namespace: default ---- -apiVersion: apps/v1 -kind: StatefulSet -metadata: - name: openldap - labels: - app.kubernetes.io/name: openldap -spec: - selector: - matchLabels: - app.kubernetes.io/name: openldap - serviceName: openldap - replicas: 1 - template: - metadata: - labels: - app.kubernetes.io/name: openldap - spec: - containers: - - name: openldap - image: 
docker.io/bitnamilegacy/openldap:2.5 - env: - - name: LDAP_ADMIN_USERNAME - value: admin - - name: LDAP_ADMIN_PASSWORD - value: admin - - name: LDAP_USERS - value: user01,user02 - - name: LDAP_PASSWORDS - value: user01,user02 - - name: LDAP_ENABLE_TLS - value: "yes" - - name: LDAP_TLS_CERT_FILE - value: /tls/tls.crt - - name: LDAP_TLS_KEY_FILE - value: /tls/tls.key - - name: LDAP_TLS_CA_FILE - value: /tls/ca.crt - ports: - - name: tls-ldap - containerPort: 1636 - volumeMounts: - - name: tls - mountPath: /tls - volumes: - - name: tls - ephemeral: - volumeClaimTemplate: - metadata: - annotations: - secrets.stackable.tech/class: openldap-tls - secrets.stackable.tech/scope: pod - spec: - storageClassName: secrets.stackable.tech - accessModes: - - ReadWriteOnce - resources: - requests: - storage: "1" ---- -apiVersion: v1 -kind: Service -metadata: - name: openldap - labels: - app.kubernetes.io/name: openldap -spec: - type: ClusterIP - ports: - - name: tls-ldap - port: 1636 - targetPort: tls-ldap - selector: - app.kubernetes.io/name: openldap ---- -apiVersion: authentication.stackable.tech/v1alpha1 -kind: AuthenticationClass -metadata: - name: airflow-with-ldap-insecure-tls-ldap -spec: - provider: - ldap: - hostname: openldap.default.svc.cluster.local - port: 1636 - searchBase: ou=users,dc=example,dc=org - ldapFieldNames: - uid: uid - group: memberof - givenName: givenName - surname: sn - email: mail - bindCredentials: - secretClass: airflow-with-ldap-bind - tls: - verification: - none: {} ---- -apiVersion: secrets.stackable.tech/v1alpha1 -kind: SecretClass -metadata: - name: airflow-with-ldap-bind -spec: - backend: - k8sSearch: - searchNamespace: - pod: {} ---- -apiVersion: v1 -kind: Secret -metadata: - name: airflow-with-ldap-bind - labels: - secrets.stackable.tech/class: airflow-with-ldap-bind -stringData: - user: cn=admin,dc=example,dc=org - password: admin ---- -apiVersion: v1 -kind: Secret -metadata: - name: airflow-with-ldap-server-veri-tls-credentials -type: 
Opaque -stringData: - adminUser.username: airflow - adminUser.firstname: Airflow - adminUser.lastname: Admin - adminUser.email: airflow@airflow.com - adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - # Only needed when using celery workers (instead of Kubernetes executors) - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 ---- -apiVersion: airflow.stackable.tech/v1alpha1 -kind: AirflowCluster -metadata: - name: airflow-insecure-tls -spec: - image: - productVersion: 3.1.6 - clusterConfig: - loadExamples: true - exposeConfig: true - credentialsSecret: airflow-with-ldap-server-veri-tls-credentials - authentication: - - authenticationClass: airflow-with-ldap-insecure-tls-ldap - userRegistrationRole: Admin - webservers: - roleConfig: - listenerClass: external-unstable - roleGroups: - default: - replicas: 1 - celeryExecutors: - roleGroups: - default: - replicas: 1 - schedulers: - roleGroups: - default: - replicas: 1 diff --git a/examples/simple-airflow-cluster-ldap.yaml b/examples/simple-airflow-cluster-ldap.yaml deleted file mode 100644 index 2cf1b515..00000000 --- a/examples/simple-airflow-cluster-ldap.yaml +++ /dev/null @@ -1,179 +0,0 @@ -# helm install secret-operator oci://oci.stackable.tech/sdp-charts/secret-operator -# helm install commons-operator oci://oci.stackable.tech/sdp-charts/commons-operator -# helm install listener-operator oci://oci.stackable.tech/sdp-charts/listener-operator -# helm install airflow-operator oci://oci.stackable.tech/sdp-charts/airflow-operator -# helm install --repo https://charts.bitnami.com/bitnami --version 12.1.5 --set auth.username=airflow --set auth.password=airflow --set auth.database=airflow --set image.repository=bitnamilegacy/postgresql --set 
volumePermissions.image.repository=bitnamilegacy/os-shell --set metrics.image.repository=bitnamilegacy/postgres-exporter --set global.security.allowInsecureImages=true airflow-postgresql postgresql -# helm install --repo https://charts.bitnami.com/bitnami --version 17.3.7 --set auth.password=redis --set replica.replicaCount=1 --set global.security.allowInsecureImages=true --set image.repository=bitnamilegacy/redis --set sentinel.image.repository=bitnamilegacy/redis-sentinel --set metrics.image.repository=bitnamilegacy/redis-exporter --set volumePermissions.image.repository=bitnamilegacy/os-shell --set kubectl.image.repository=bitnamilegacy/kubectl --set sysctl.image.repository=bitnamilegacy/os-shell airflow-redis redis -# Log in with user01/user01 or user02/user02 ---- -apiVersion: secrets.stackable.tech/v1alpha1 -kind: SecretClass -metadata: - name: openldap-tls -spec: - backend: - autoTls: - ca: - autoGenerate: true - secret: - name: openldap-tls-ca - namespace: default ---- -apiVersion: apps/v1 -kind: StatefulSet -metadata: - name: openldap - labels: - app.kubernetes.io/name: openldap -spec: - selector: - matchLabels: - app.kubernetes.io/name: openldap - serviceName: openldap - replicas: 1 - template: - metadata: - labels: - app.kubernetes.io/name: openldap - spec: - containers: - - name: openldap - image: docker.io/bitnamilegacy/openldap:2.5 - env: - - name: LDAP_ADMIN_USERNAME - value: admin - - name: LDAP_ADMIN_PASSWORD - value: admin - - name: LDAP_USERS - value: user01,user02 - - name: LDAP_PASSWORDS - value: user01,user02 - - name: LDAP_ENABLE_TLS - value: "yes" - - name: LDAP_TLS_CERT_FILE - value: /tls/tls.crt - - name: LDAP_TLS_KEY_FILE - value: /tls/tls.key - - name: LDAP_TLS_CA_FILE - value: /tls/ca.crt - ports: - - name: tls-ldap - containerPort: 1636 - volumeMounts: - - name: tls - mountPath: /tls - volumes: - - name: tls - ephemeral: - volumeClaimTemplate: - metadata: - annotations: - secrets.stackable.tech/class: openldap-tls - 
secrets.stackable.tech/scope: pod - spec: - storageClassName: secrets.stackable.tech - accessModes: - - ReadWriteOnce - resources: - requests: - storage: "1" ---- -apiVersion: v1 -kind: Service -metadata: - name: openldap - labels: - app.kubernetes.io/name: openldap -spec: - type: ClusterIP - ports: - - name: tls-ldap - port: 636 - targetPort: tls-ldap - selector: - app.kubernetes.io/name: openldap ---- -apiVersion: authentication.stackable.tech/v1alpha1 -kind: AuthenticationClass -metadata: - name: airflow-with-ldap-server-veri-tls-ldap -spec: - provider: - ldap: - hostname: openldap.default.svc.cluster.local - port: 636 - searchBase: ou=users,dc=example,dc=org - ldapFieldNames: - uid: uid - bindCredentials: - secretClass: airflow-with-ldap-server-veri-tls-ldap-bind - tls: - verification: - server: - caCert: - secretClass: openldap-tls ---- -apiVersion: secrets.stackable.tech/v1alpha1 -kind: SecretClass -metadata: - name: airflow-with-ldap-server-veri-tls-ldap-bind -spec: - backend: - k8sSearch: - searchNamespace: - pod: {} ---- -apiVersion: v1 -kind: Secret -metadata: - name: airflow-with-ldap-server-veri-tls-ldap-bind - labels: - secrets.stackable.tech/class: airflow-with-ldap-server-veri-tls-ldap-bind -stringData: - user: cn=admin,dc=example,dc=org - password: admin ---- -apiVersion: v1 -kind: Secret -metadata: - name: airflow-with-ldap-server-veri-tls-credentials -type: Opaque -stringData: - adminUser.username: airflow - adminUser.firstname: Airflow - adminUser.lastname: Admin - adminUser.email: airflow@airflow.com - adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - # Only needed when using celery workers (instead of Kubernetes executors) - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 ---- -apiVersion: 
airflow.stackable.tech/v1alpha1 -kind: AirflowCluster -metadata: - name: airflow-with-ldap-server-veri-tls -spec: - image: - productVersion: 3.1.6 - clusterConfig: - loadExamples: true - exposeConfig: true - credentialsSecret: airflow-with-ldap-server-veri-tls-credentials - authentication: - - authenticationClass: airflow-with-ldap-server-veri-tls-ldap - userRegistrationRole: Admin - webservers: - roleConfig: - listenerClass: external-unstable - roleGroups: - default: - replicas: 1 - celeryExecutors: - roleGroups: - default: - replicas: 1 - schedulers: - roleGroups: - default: - replicas: 1 diff --git a/examples/simple-airflow-cluster.yaml b/examples/simple-airflow-cluster.yaml deleted file mode 100644 index 0dd0449e..00000000 --- a/examples/simple-airflow-cluster.yaml +++ /dev/null @@ -1,42 +0,0 @@ ---- -apiVersion: v1 -kind: Secret -metadata: - name: simple-airflow-credentials -type: Opaque -stringData: - adminUser.username: airflow - adminUser.firstname: Airflow - adminUser.lastname: Admin - adminUser.email: airflow@airflow.com - adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - # Only needed when using celery workers (instead of Kubernetes executors) - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql.default.svc.cluster.local/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 ---- -apiVersion: airflow.stackable.tech/v1alpha1 -kind: AirflowCluster -metadata: - name: airflow -spec: - image: - productVersion: 3.1.6 - clusterConfig: - loadExamples: true - exposeConfig: false - credentialsSecret: simple-airflow-credentials - webservers: - roleConfig: - listenerClass: external-unstable - roleGroups: - default: - replicas: 1 - celeryExecutors: - roleGroups: - default: - replicas: 2 - schedulers: - roleGroups: - default: - replicas: 1 diff --git a/extra/crds.yaml b/extra/crds.yaml index 
f18b9d79..ac6f6dba 100644 --- a/extra/crds.yaml +++ b/extra/crds.yaml @@ -39,6 +39,66 @@ spec: The celery executor. Deployed with an explicit number of replicas. properties: + broker: + description: Connection information for the celery broker queue. + oneOf: + - required: + - redis + - required: + - generic + properties: + generic: + description: |- + A generic Celery database connection for broker or result backend types not covered by a + dedicated variant. + + Use this when you need a Celery-compatible connection that does not have a first-class + connection type. The complete connection URL is read from a Secret, giving the user full + control over the connection string. + properties: + connectionUrlSecretName: + description: The name of the Secret that contains an `connectionUrl` key with the complete Celery URL. + type: string + required: + - connectionUrlSecretName + type: object + redis: + description: |- + Connection settings for a [Redis](https://redis.io/) instance. + + Redis is commonly used as a Celery message broker or result backend (e.g. for Apache Airflow). + properties: + credentialsSecretName: + description: |- + Name of a Secret containing the `username` and `password` keys used to authenticate + against the Redis server. + type: string + databaseId: + default: 0 + description: |- + Numeric index of the Redis logical database to use. Defaults to `0`. + + Redis supports multiple logical databases within a single instance, identified by an + integer index. Database `0` is the default. + format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + host: + description: Hostname or IP address of the Redis server. + type: string + port: + default: 6379 + description: Port the Redis server is listening on. Defaults to `6379`. 
+ format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + required: + - credentialsSecretName + - host + type: object + type: object cliOverrides: additionalProperties: type: string @@ -515,6 +575,65 @@ spec: for more information. type: object x-kubernetes-preserve-unknown-fields: true + resultBackend: + description: Connection information for the celery backend database. + oneOf: + - required: + - postgresql + - required: + - generic + properties: + generic: + description: |- + A generic Celery database connection for broker or result backend types not covered by a + dedicated variant. + + Use this when you need a Celery-compatible connection that does not have a first-class + connection type. The complete connection URL is read from a Secret, giving the user full + control over the connection string. + properties: + connectionUrlSecretName: + description: The name of the Secret that contains an `connectionUrl` key with the complete Celery URL. + type: string + required: + - connectionUrlSecretName + type: object + postgresql: + description: Connection settings for a [PostgreSQL](https://www.postgresql.org/) database. + properties: + credentialsSecretName: + description: |- + Name of a Secret containing the `username` and `password` keys used to authenticate + against the PostgreSQL server. + type: string + database: + description: Name of the database (schema) to connect to. + type: string + host: + description: Hostname or IP address of the PostgreSQL server. + type: string + parameters: + additionalProperties: + type: string + default: {} + description: |- + Additional map of JDBC connection parameters to append to the connection URL. The given + `HashMap` will be converted to query parameters in the form of + `?param1=value1¶m2=value2`. + type: object + port: + default: 5432 + description: Port the PostgreSQL server is listening on. Defaults to `5432`. 
+ format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + required: + - credentialsSecretName + - database + - host + type: object + type: object roleConfig: default: podDisruptionBudget: @@ -1042,6 +1161,8 @@ spec: type: object type: object required: + - broker + - resultBackend - roleGroups type: object clusterConfig: @@ -1155,18 +1276,16 @@ spec: - configMapName type: object type: object - credentialsSecret: + credentialsSecretName: description: |- - The name of the Secret object containing the admin user credentials and database connection details. - Read the + The name of the Secret object containing the admin user credentials. Read the [getting started guide first steps](https://docs.stackable.tech/home/nightly/airflow/getting_started/first_steps) to find out more. type: string dagsGitSync: default: [] description: |- - The `gitSync` settings allow configuring DAGs to mount via `git-sync`. - Learn more in the + The `gitSync` settings allow configuring DAGs to mount via `git-sync`. Learn more in the [mounting DAGs documentation](https://docs.stackable.tech/home/nightly/airflow/usage-guide/mounting-dags#_via_git_sync). items: properties: @@ -1311,7 +1430,7 @@ spec: type: object exposeConfig: default: false - description: for internal use only - not for production use. + description: For internal use only - not for production use. type: boolean loadExamples: default: false @@ -1319,6 +1438,64 @@ spec: Whether to load example DAGs or not; defaults to false. The examples are used in the [getting started guide](https://docs.stackable.tech/home/nightly/airflow/getting_started/). type: boolean + metadataDatabase: + description: Configure the database where Airflow stores all it's internal metadata + oneOf: + - required: + - postgresql + - required: + - generic + properties: + generic: + description: |- + A generic SQLAlchemy database connection for database types not covered by a dedicated variant. 
+ + Use this when you need to connect to a SQLAlchemy-compatible database that does not have a + first-class connection type. The complete connection URL is read from a Secret, giving the user + full control over the connection string including any driver-specific options. + properties: + connectionUrlSecretName: + description: The name of the Secret that contains an `connectionUrl` key with the complete SQLAlchemy URL. + type: string + required: + - connectionUrlSecretName + type: object + postgresql: + description: Connection settings for a [PostgreSQL](https://www.postgresql.org/) database. + properties: + credentialsSecretName: + description: |- + Name of a Secret containing the `username` and `password` keys used to authenticate + against the PostgreSQL server. + type: string + database: + description: Name of the database (schema) to connect to. + type: string + host: + description: Hostname or IP address of the PostgreSQL server. + type: string + parameters: + additionalProperties: + type: string + default: {} + description: |- + Additional map of JDBC connection parameters to append to the connection URL. The given + `HashMap` will be converted to query parameters in the form of + `?param1=value1¶m2=value2`. + type: object + port: + default: 5432 + description: Port the PostgreSQL server is listening on. Defaults to `5432`. + format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + required: + - credentialsSecretName + - database + - host + type: object + type: object vectorAggregatorConfigMapName: description: |- Name of the Vector aggregator [discovery ConfigMap](https://docs.stackable.tech/home/nightly/concepts/service_discovery). 
@@ -1342,7 +1519,8 @@ spec: x-kubernetes-preserve-unknown-fields: true type: array required: - - credentialsSecret + - credentialsSecretName + - metadataDatabase type: object clusterOperation: default: @@ -2438,7 +2616,7 @@ spec: type: string type: object kubernetesExecutors: - description: With the Kuberentes executor, executor Pods are created on demand. + description: With the Kubernetes executor, executor Pods are created on demand. properties: cliOverrides: additionalProperties: @@ -6044,6 +6222,66 @@ spec: The celery executor. Deployed with an explicit number of replicas. properties: + broker: + description: Connection information for the celery broker queue. + oneOf: + - required: + - redis + - required: + - generic + properties: + generic: + description: |- + A generic Celery database connection for broker or result backend types not covered by a + dedicated variant. + + Use this when you need a Celery-compatible connection that does not have a first-class + connection type. The complete connection URL is read from a Secret, giving the user full + control over the connection string. + properties: + connectionUrlSecretName: + description: The name of the Secret that contains an `connectionUrl` key with the complete Celery URL. + type: string + required: + - connectionUrlSecretName + type: object + redis: + description: |- + Connection settings for a [Redis](https://redis.io/) instance. + + Redis is commonly used as a Celery message broker or result backend (e.g. for Apache Airflow). + properties: + credentialsSecretName: + description: |- + Name of a Secret containing the `username` and `password` keys used to authenticate + against the Redis server. + type: string + databaseId: + default: 0 + description: |- + Numeric index of the Redis logical database to use. Defaults to `0`. + + Redis supports multiple logical databases within a single instance, identified by an + integer index. Database `0` is the default. 
+ format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + host: + description: Hostname or IP address of the Redis server. + type: string + port: + default: 6379 + description: Port the Redis server is listening on. Defaults to `6379`. + format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + required: + - credentialsSecretName + - host + type: object + type: object cliOverrides: additionalProperties: type: string @@ -6520,6 +6758,65 @@ spec: for more information. type: object x-kubernetes-preserve-unknown-fields: true + resultBackend: + description: Connection information for the celery backend database. + oneOf: + - required: + - postgresql + - required: + - generic + properties: + generic: + description: |- + A generic Celery database connection for broker or result backend types not covered by a + dedicated variant. + + Use this when you need a Celery-compatible connection that does not have a first-class + connection type. The complete connection URL is read from a Secret, giving the user full + control over the connection string. + properties: + connectionUrlSecretName: + description: The name of the Secret that contains an `connectionUrl` key with the complete Celery URL. + type: string + required: + - connectionUrlSecretName + type: object + postgresql: + description: Connection settings for a [PostgreSQL](https://www.postgresql.org/) database. + properties: + credentialsSecretName: + description: |- + Name of a Secret containing the `username` and `password` keys used to authenticate + against the PostgreSQL server. + type: string + database: + description: Name of the database (schema) to connect to. + type: string + host: + description: Hostname or IP address of the PostgreSQL server. + type: string + parameters: + additionalProperties: + type: string + default: {} + description: |- + Additional map of JDBC connection parameters to append to the connection URL. 
The given + `HashMap` will be converted to query parameters in the form of + `?param1=value1¶m2=value2`. + type: object + port: + default: 5432 + description: Port the PostgreSQL server is listening on. Defaults to `5432`. + format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + required: + - credentialsSecretName + - database + - host + type: object + type: object roleConfig: default: podDisruptionBudget: @@ -7047,6 +7344,8 @@ spec: type: object type: object required: + - broker + - resultBackend - roleGroups type: object clusterConfig: @@ -7160,18 +7459,16 @@ spec: - configMapName type: object type: object - credentialsSecret: + credentialsSecretName: description: |- - The name of the Secret object containing the admin user credentials and database connection details. - Read the + The name of the Secret object containing the admin user credentials. Read the [getting started guide first steps](https://docs.stackable.tech/home/nightly/airflow/getting_started/first_steps) to find out more. type: string dagsGitSync: default: [] description: |- - The `gitSync` settings allow configuring DAGs to mount via `git-sync`. - Learn more in the + The `gitSync` settings allow configuring DAGs to mount via `git-sync`. Learn more in the [mounting DAGs documentation](https://docs.stackable.tech/home/nightly/airflow/usage-guide/mounting-dags#_via_git_sync). items: properties: @@ -7292,7 +7589,7 @@ spec: type: object exposeConfig: default: false - description: for internal use only - not for production use. + description: For internal use only - not for production use. type: boolean loadExamples: default: false @@ -7300,6 +7597,64 @@ spec: Whether to load example DAGs or not; defaults to false. The examples are used in the [getting started guide](https://docs.stackable.tech/home/nightly/airflow/getting_started/). 
type: boolean + metadataDatabase: + description: Configure the database where Airflow stores all its internal metadata + oneOf: + - required: + - postgresql + - required: + - generic + properties: + generic: + description: |- + A generic SQLAlchemy database connection for database types not covered by a dedicated variant. + + Use this when you need to connect to a SQLAlchemy-compatible database that does not have a + first-class connection type. The complete connection URL is read from a Secret, giving the user + full control over the connection string including any driver-specific options. + properties: + connectionUrlSecretName: + description: The name of the Secret that contains a `connectionUrl` key with the complete SQLAlchemy URL. + type: string + required: + - connectionUrlSecretName + type: object + postgresql: + description: Connection settings for a [PostgreSQL](https://www.postgresql.org/) database. + properties: + credentialsSecretName: + description: |- + Name of a Secret containing the `username` and `password` keys used to authenticate + against the PostgreSQL server. + type: string + database: + description: Name of the database (schema) to connect to. + type: string + host: + description: Hostname or IP address of the PostgreSQL server. + type: string + parameters: + additionalProperties: + type: string + default: {} + description: |- + Additional map of JDBC connection parameters to append to the connection URL. The given + `HashMap` will be converted to query parameters in the form of + `?param1=value1&param2=value2`. + type: object + port: + default: 5432 + description: Port the PostgreSQL server is listening on. Defaults to `5432`. 
+ format: uint16 + maximum: 65535.0 + minimum: 0.0 + type: integer + required: + - credentialsSecretName + - database + - host + type: object + type: object vectorAggregatorConfigMapName: description: |- Name of the Vector aggregator [discovery ConfigMap](https://docs.stackable.tech/home/nightly/concepts/service_discovery). @@ -7323,7 +7678,8 @@ spec: x-kubernetes-preserve-unknown-fields: true type: array required: - - credentialsSecret + - credentialsSecretName + - metadataDatabase type: object clusterOperation: default: @@ -8419,7 +8775,7 @@ spec: type: string type: object kubernetesExecutors: - description: With the Kuberentes executor, executor Pods are created on demand. + description: With the Kubernetes executor, executor Pods are created on demand. properties: cliOverrides: additionalProperties: diff --git a/rust/operator-binary/src/airflow_controller.rs b/rust/operator-binary/src/airflow_controller.rs index a54c7e4f..b6d95f48 100644 --- a/rust/operator-binary/src/airflow_controller.rs +++ b/rust/operator-binary/src/airflow_controller.rs @@ -39,6 +39,13 @@ use stackable_operator::{ authentication::{core as auth_core, ldap}, git_sync, listener, }, + database_connections::{ + TemplatingMechanism, + drivers::{ + celery::CeleryDatabaseConnectionDetails, + sqlalchemy::SqlAlchemyDatabaseConnectionDetails, + }, + }, k8s_openapi::{ self, DeepMerge, api::{ @@ -400,6 +407,31 @@ pub async fn reconcile_airflow( ) .await .context(InvalidAuthorizationConfigSnafu)?; + // We don't have a config file, but do everything via env substitution + + let templating_mechanism = TemplatingMechanism::BashEnvSubstitution; + let metadata_database_connection_details = airflow + .spec + .cluster_config + .metadata_database + .sqlalchemy_connection_details_with_templating("METADATA", &templating_mechanism); + let celery_database_connection_details = match &airflow.spec.executor { + AirflowExecutor::CeleryExecutors { + result_backend: celery_result_backend, + broker: celery_broker, + .. 
+ } => { + let celery_result_backend = celery_result_backend + .celery_connection_details_with_templating( + "CELERY_RESULT_BACKEND", + &templating_mechanism, + ); + let celery_broker = celery_broker + .celery_connection_details_with_templating("CELERY_BROKER", &templating_mechanism); + Some((celery_result_backend, celery_broker)) + } + _ => None, + }; let mut roles = HashMap::new(); @@ -462,13 +494,14 @@ pub async fn reconcile_airflow( // if the kubernetes executor is specified, in place of a worker role that will be in the role // collection there will be a pod template created to be used for pod provisioning - if let AirflowExecutor::KubernetesExecutor { + if let AirflowExecutor::KubernetesExecutors { common_configuration, } = &airflow_executor { build_executor_template( airflow, common_configuration, + &metadata_database_connection_details, &resolved_product_image, &authentication_config, &authorization_config, @@ -645,6 +678,8 @@ pub async fn reconcile_airflow( rolegroup_config, &authentication_config, &authorization_config, + &metadata_database_connection_details, + &celery_database_connection_details, &rbac_sa, &merged_airflow_config, airflow_executor, @@ -686,6 +721,7 @@ pub async fn reconcile_airflow( async fn build_executor_template( airflow: &v1alpha2::AirflowCluster, common_config: &AirflowExecutorCommonConfiguration, + metadata_database_connection_details: &SqlAlchemyDatabaseConnectionDetails, resolved_product_image: &ResolvedProductImage, authentication_config: &AirflowClientAuthenticationDetailsResolved, authorization_config: &AirflowAuthorizationResolved, @@ -735,6 +771,7 @@ async fn build_executor_template( airflow, resolved_product_image, authentication_config, + metadata_database_connection_details, &rbac_sa.name_unchecked(), &merged_executor_config, &common_config.env_overrides, @@ -933,6 +970,11 @@ fn build_server_rolegroup_statefulset( rolegroup_config: &HashMap>, authentication_config: &AirflowClientAuthenticationDetailsResolved, 
authorization_config: &AirflowAuthorizationResolved, + metadata_database_connection_details: &SqlAlchemyDatabaseConnectionDetails, + celery_database_connection_details: &Option<( + CeleryDatabaseConnectionDetails, + CeleryDatabaseConnectionDetails, + )>, service_account: &ServiceAccount, merged_airflow_config: &AirflowConfig, executor: &AirflowExecutor, @@ -1019,6 +1061,8 @@ fn build_server_rolegroup_statefulset( executor, authentication_config, authorization_config, + metadata_database_connection_details, + celery_database_connection_details, git_sync_resources, resolved_product_image, ) @@ -1039,7 +1083,7 @@ fn build_server_rolegroup_statefulset( .add_volume_mount(LOG_VOLUME_NAME, STACKABLE_LOG_DIR) .context(AddVolumeMountSnafu)?; - if let AirflowExecutor::KubernetesExecutor { .. } = executor { + if let AirflowExecutor::KubernetesExecutors { .. } = executor { airflow_container .add_volume_mount(TEMPLATE_VOLUME_NAME, TEMPLATE_LOCATION) .context(AddVolumeMountSnafu)?; @@ -1087,7 +1131,7 @@ fn build_server_rolegroup_statefulset( // and registered. This will result in ModuleNotFoundError errors. This can be avoided // by running a one-off git-sync process in an init-container so that all DAG // dependencies are fully loaded. The sidecar git-sync is then used for regular updates. - let use_git_sync_init_containers = matches!(executor, AirflowExecutor::CeleryExecutor { .. }); + let use_git_sync_init_containers = matches!(executor, AirflowExecutor::CeleryExecutors { .. 
}); add_git_sync_resources( &mut pb, &mut airflow_container, @@ -1096,6 +1140,12 @@ fn build_server_rolegroup_statefulset( use_git_sync_init_containers, )?; + metadata_database_connection_details.add_to_container(&mut airflow_container); + if let Some((celery_result_backend, celery_broker)) = celery_database_connection_details { + celery_result_backend.add_to_container(&mut airflow_container); + celery_broker.add_to_container(&mut airflow_container); + } + pb.add_container(airflow_container.build()); let metrics_container = ContainerBuilder::new("metrics") @@ -1140,7 +1190,7 @@ fn build_server_rolegroup_statefulset( )) .context(AddVolumeSnafu)?; - if let AirflowExecutor::KubernetesExecutor { .. } = executor { + if let AirflowExecutor::KubernetesExecutors { .. } = executor { pb.add_volume( VolumeBuilder::new(TEMPLATE_VOLUME_NAME) .with_config_map(airflow.executor_template_configmap_name()) @@ -1252,6 +1302,7 @@ fn build_executor_template_config_map( airflow: &v1alpha2::AirflowCluster, resolved_product_image: &ResolvedProductImage, authentication_config: &AirflowClientAuthenticationDetailsResolved, + metadata_database_connection_details: &SqlAlchemyDatabaseConnectionDetails, sa_name: &str, merged_executor_config: &ExecutorConfig, env_overrides: &HashMap, @@ -1299,6 +1350,7 @@ fn build_executor_template_config_map( airflow, env_overrides, merged_executor_config, + metadata_database_connection_details, git_sync_resources, resolved_product_image, )) @@ -1319,6 +1371,8 @@ fn build_executor_template_config_map( true, )?; + metadata_database_connection_details.add_to_container(&mut airflow_container); + pb.add_container(airflow_container.build()); pb.add_volumes(airflow.volumes().clone()) .context(AddVolumeSnafu)?; diff --git a/rust/operator-binary/src/crd/affinity.rs b/rust/operator-binary/src/crd/affinity.rs index 3fe53e1d..6ad8c1eb 100644 --- a/rust/operator-binary/src/crd/affinity.rs +++ b/rust/operator-binary/src/crd/affinity.rs @@ -71,12 +71,26 @@ mod tests { image: 
productVersion: 3.1.6 clusterConfig: - credentialsSecret: airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleGroups: default: replicas: 1 celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials roleGroups: default: replicas: 2 @@ -165,7 +179,12 @@ mod tests { image: productVersion: 3.1.6 clusterConfig: - credentialsSecret: airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleGroups: default: @@ -235,8 +254,8 @@ mod tests { }; let executor_config = match &airflow.spec.executor { - AirflowExecutor::CeleryExecutor { .. } => unreachable!(), - AirflowExecutor::KubernetesExecutor { + AirflowExecutor::CeleryExecutors { .. 
} => unreachable!(), + AirflowExecutor::KubernetesExecutors { common_configuration, } => &common_configuration.config, }; diff --git a/rust/operator-binary/src/crd/databases.rs b/rust/operator-binary/src/crd/databases.rs new file mode 100644 index 00000000..dee0785d --- /dev/null +++ b/rust/operator-binary/src/crd/databases.rs @@ -0,0 +1,76 @@ +use std::ops::Deref; + +use serde::{Deserialize, Serialize}; +use stackable_operator::{ + database_connections::{ + databases::{postgresql::PostgresqlConnection, redis::RedisConnection}, + drivers::{ + celery::{CeleryDatabaseConnection, GenericCeleryDatabaseConnection}, + sqlalchemy::{GenericSqlAlchemyDatabaseConnection, SqlAlchemyDatabaseConnection}, + }, + }, + schemars::{self, JsonSchema}, +}; + +#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum MetadataDatabaseConnection { + // Docs are on the struct + Postgresql(PostgresqlConnection), + + // Docs are on the struct + Generic(GenericSqlAlchemyDatabaseConnection), +} + +impl Deref for MetadataDatabaseConnection { + type Target = dyn SqlAlchemyDatabaseConnection; + + fn deref(&self) -> &Self::Target { + match self { + Self::Postgresql(p) => p, + Self::Generic(g) => g, + } + } +} + +#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum CeleryResultBackendConnection { + // Docs are on the struct + Postgresql(PostgresqlConnection), + + // Docs are on the struct + Generic(GenericCeleryDatabaseConnection), +} + +impl Deref for CeleryResultBackendConnection { + type Target = dyn CeleryDatabaseConnection; + + fn deref(&self) -> &Self::Target { + match self { + Self::Postgresql(p) => p, + Self::Generic(g) => g, + } + } +} + +#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum CeleryBrokerConnection { + // Docs are on the struct + Redis(RedisConnection), + + // Docs are on the struct + 
 Generic(GenericCeleryDatabaseConnection), +} + +impl Deref for CeleryBrokerConnection { + type Target = dyn CeleryDatabaseConnection; + + fn deref(&self) -> &Self::Target { + match self { + Self::Redis(r) => r, + Self::Generic(g) => g, + } + } +} diff --git a/rust/operator-binary/src/crd/mod.rs b/rust/operator-binary/src/crd/mod.rs index e02c2b9a..e3eedf6a 100644 --- a/rust/operator-binary/src/crd/mod.rs +++ b/rust/operator-binary/src/crd/mod.rs @@ -53,6 +53,9 @@ use crate::{ AirflowAuthenticationClassResolved, AirflowClientAuthenticationDetails, AirflowClientAuthenticationDetailsResolved, }, + databases::{ + CeleryBrokerConnection, CeleryResultBackendConnection, MetadataDatabaseConnection, + }, }, util::role_service_name, }; @@ -60,6 +63,7 @@ use crate::{ pub mod affinity; pub mod authentication; pub mod authorization; +pub mod databases; pub mod internal_secret; pub const APP_NAME: &str = "airflow"; @@ -275,14 +279,15 @@ pub mod versioned { #[serde(skip_serializing_if = "Option::is_none")] pub authorization: Option, - /// The name of the Secret object containing the admin user credentials and database connection details. - /// Read the + /// Configure the database where Airflow stores all its internal metadata + pub metadata_database: MetadataDatabaseConnection, + + /// The name of the Secret object containing the admin user credentials. Read the /// [getting started guide first steps](DOCS_BASE_URL_PLACEHOLDER/airflow/getting_started/first_steps) /// to find out more. - pub credentials_secret: String, + pub credentials_secret_name: String, - /// The `gitSync` settings allow configuring DAGs to mount via `git-sync`. - /// Learn more in the + /// The `gitSync` settings allow configuring DAGs to mount via `git-sync`. Learn more in the /// [mounting DAGs documentation](DOCS_BASE_URL_PLACEHOLDER/airflow/usage-guide/mounting-dags#_via_git_sync). 
#[serde(default)] #[versioned( @@ -291,7 +296,7 @@ pub mod versioned { )] pub dags_git_sync: Vec, - /// for internal use only - not for production use. + /// For internal use only - not for production use. #[serde(default)] pub expose_config: bool, @@ -412,8 +417,8 @@ impl v1alpha2::AirflowCluster { AirflowRole::DagProcessor => self.spec.dag_processors.to_owned(), AirflowRole::Triggerer => self.spec.triggerers.to_owned(), AirflowRole::Worker => { - if let AirflowExecutor::CeleryExecutor { config } = &self.spec.executor { - Some(config.clone()) + if let AirflowExecutor::CeleryExecutors { config, .. } = &self.spec.executor { + Some(*config.clone()) } else { None } @@ -554,6 +559,7 @@ pub struct AirflowAuthorization { pub struct AirflowOpaConfig { #[serde(flatten)] pub opa: OpaConfig, + #[serde(default)] pub cache: UserInformationCache, } @@ -812,7 +818,7 @@ impl AirflowRole { .context(UnknownAirflowRoleSnafu { role, roles })?, ), AirflowRole::Worker => { - if let AirflowExecutor::CeleryExecutor { config } = &airflow.spec.executor { + if let AirflowExecutor::CeleryExecutors { config, .. } = &airflow.spec.executor { config } else { return Err(Error::NoRoleForExecutorFailure); @@ -842,24 +848,40 @@ fn container_debug_command() -> String { format!("containerdebug --output={STACKABLE_LOG_DIR}/containerdebug-state.json --loop &") } -#[derive(Clone, Debug, Deserialize, Display, JsonSchema, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)] +#[serde(rename_all = "camelCase")] pub enum AirflowExecutor { /// The celery executor. /// Deployed with an explicit number of replicas. - #[serde(rename = "celeryExecutors")] - CeleryExecutor { + #[serde(rename_all = "camelCase")] + CeleryExecutors { #[serde(flatten)] - config: AirflowRoleType, + config: Box, + + /// Connection information for the celery backend database. + result_backend: CeleryResultBackendConnection, + + /// Connection information for the celery broker queue. 
+ broker: CeleryBrokerConnection, }, - /// With the Kuberentes executor, executor Pods are created on demand. - #[serde(rename = "kubernetesExecutors")] - KubernetesExecutor { + /// With the Kubernetes executor, executor Pods are created on demand. + KubernetesExecutors { #[serde(flatten)] - common_configuration: AirflowExecutorCommonConfiguration, + common_configuration: Box, }, } +impl AirflowExecutor { + /// Name of the executor as expected to be passed via `AIRFLOW__CORE__EXECUTOR` + pub fn as_airflow_core_executor(&self) -> &'static str { + match self { + AirflowExecutor::CeleryExecutors { .. } => "CeleryExecutor", + AirflowExecutor::KubernetesExecutors { .. } => "KubernetesExecutor", + } + } +} + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, Debug, Default, JsonSchema, PartialEq, Fragment)] #[fragment_attrs( @@ -1105,7 +1127,12 @@ mod tests { clusterConfig: loadExamples: true exposeConfig: true - credentialsSecret: simple-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleGroups: default: @@ -1130,7 +1157,10 @@ mod tests { assert_eq!("3.1.6", &resolved_airflow_image.product_version); - assert_eq!("KubernetesExecutor", cluster.spec.executor.to_string()); + assert_eq!( + "KubernetesExecutor", + cluster.spec.executor.as_airflow_core_executor() + ); assert!(cluster.spec.cluster_config.load_examples); assert!(cluster.spec.cluster_config.expose_config); // defaults to true diff --git a/rust/operator-binary/src/env_vars.rs b/rust/operator-binary/src/env_vars.rs index d67c1c33..b4828dbe 100644 --- a/rust/operator-binary/src/env_vars.rs +++ b/rust/operator-binary/src/env_vars.rs @@ -8,6 +8,9 @@ use snafu::Snafu; use stackable_operator::{ commons::product_image_selection::ResolvedProductImage, crd::{authentication::oidc, git_sync}, + database_connections::drivers::{ + 
celery::CeleryDatabaseConnectionDetails, sqlalchemy::SqlAlchemyDatabaseConnectionDetails, + }, k8s_openapi::api::core::v1::EnvVar, kube::ResourceExt, product_logging::framework::create_vector_shutdown_file_command, @@ -80,11 +83,15 @@ pub fn build_airflow_statefulset_envs( executor: &AirflowExecutor, auth_config: &AirflowClientAuthenticationDetailsResolved, authorization_config: &AirflowAuthorizationResolved, + metadata_database_connection_details: &SqlAlchemyDatabaseConnectionDetails, + celery_database_connection_details: &Option<( + CeleryDatabaseConnectionDetails, + CeleryDatabaseConnectionDetails, + )>, git_sync_resources: &git_sync::v1alpha2::GitSyncResources, resolved_product_image: &ResolvedProductImage, ) -> Result, Error> { let mut env: BTreeMap = BTreeMap::new(); - let secret = airflow.spec.cluster_config.credentials_secret.as_str(); let internal_secret_name = airflow.shared_internal_secret_secret_name(); env.extend(static_envs(git_sync_resources)); @@ -126,31 +133,29 @@ pub fn build_airflow_statefulset_envs( env.insert( AIRFLOW_DATABASE_SQL_ALCHEMY_CONN.into(), - env_var_from_secret( - AIRFLOW_DATABASE_SQL_ALCHEMY_CONN, - secret, - "connections.sqlalchemyDatabaseUri", - ), + EnvVar { + name: AIRFLOW_DATABASE_SQL_ALCHEMY_CONN.into(), + value: Some(metadata_database_connection_details.url_template.clone()), + ..Default::default() + }, ); - - // Redis is only needed when celery executors are used - // see https://github.com/stackabletech/airflow-operator/issues/424 for details - if matches!(executor, AirflowExecutor::CeleryExecutor { .. 
}) { + // Only needed when celery executors are used + if let Some((celery_result_backend, celery_broker)) = celery_database_connection_details { env.insert( AIRFLOW_CELERY_RESULT_BACKEND.into(), - env_var_from_secret( - AIRFLOW_CELERY_RESULT_BACKEND, - secret, - "connections.celeryResultBackend", - ), + EnvVar { + name: AIRFLOW_CELERY_RESULT_BACKEND.into(), + value: Some(celery_result_backend.url_template.clone()), + ..Default::default() + }, ); env.insert( AIRFLOW_CELERY_BROKER_URL.into(), - env_var_from_secret( - AIRFLOW_CELERY_BROKER_URL, - secret, - "connections.celeryBrokerUrl", - ), + EnvVar { + name: AIRFLOW_CELERY_BROKER_URL.into(), + value: Some(celery_broker.url_template.clone()), + ..Default::default() + }, ); } @@ -199,12 +204,12 @@ pub fn build_airflow_statefulset_envs( AIRFLOW_CORE_EXECUTOR.into(), EnvVar { name: AIRFLOW_CORE_EXECUTOR.into(), - value: Some(executor.to_string()), + value: Some(executor.as_airflow_core_executor().to_owned()), ..Default::default() }, ); - if let AirflowExecutor::KubernetesExecutor { .. } = executor { + if let AirflowExecutor::KubernetesExecutors { .. } = executor { env.insert( AIRFLOW_KUBERNETES_EXECUTOR_POD_TEMPLATE_FILE.into(), EnvVar { @@ -227,7 +232,7 @@ pub fn build_airflow_statefulset_envs( // Database initialization is limited to the scheduler. 
// See https://github.com/stackabletech/airflow-operator/issues/259 AirflowRole::Scheduler => { - let secret = &airflow.spec.cluster_config.credentials_secret; + let secret = &airflow.spec.cluster_config.credentials_secret_name; env.insert( ADMIN_USERNAME.into(), env_var_from_secret(ADMIN_USERNAME, secret, "adminUser.username"), @@ -372,19 +377,19 @@ pub fn build_airflow_template_envs( airflow: &v1alpha2::AirflowCluster, env_overrides: &HashMap, config: &ExecutorConfig, + metadata_database_connection_details: &SqlAlchemyDatabaseConnectionDetails, git_sync_resources: &git_sync::v1alpha2::GitSyncResources, resolved_product_image: &ResolvedProductImage, ) -> Vec { let mut env: BTreeMap = BTreeMap::new(); - let secret = airflow.spec.cluster_config.credentials_secret.as_str(); env.insert( AIRFLOW_DATABASE_SQL_ALCHEMY_CONN.into(), - env_var_from_secret( - AIRFLOW_DATABASE_SQL_ALCHEMY_CONN, - secret, - "connections.sqlalchemyDatabaseUri", - ), + EnvVar { + name: AIRFLOW_DATABASE_SQL_ALCHEMY_CONN.into(), + value: Some(metadata_database_connection_details.url_template.clone()), + ..Default::default() + }, ); env.insert( diff --git a/rust/operator-binary/src/operations/pdb.rs b/rust/operator-binary/src/operations/pdb.rs index 9f010800..b3261678 100644 --- a/rust/operator-binary/src/operations/pdb.rs +++ b/rust/operator-binary/src/operations/pdb.rs @@ -40,8 +40,8 @@ pub async fn add_pdbs( AirflowRole::DagProcessor => max_unavailable_dag_processors(), AirflowRole::Triggerer => max_unavailable_triggerers(), AirflowRole::Worker => match airflow.spec.executor { - AirflowExecutor::CeleryExecutor { .. } => max_unavailable_workers(), - AirflowExecutor::KubernetesExecutor { .. } => { + AirflowExecutor::CeleryExecutors { .. } => max_unavailable_workers(), + AirflowExecutor::KubernetesExecutors { .. } => { // In case Airflow creates the Pods, we don't want to influence that. 
return Ok(()); } diff --git a/tests/templates/kuttl/ca-cert/15-secrets.yaml b/tests/templates/kuttl/ca-cert/15-secrets.yaml index a52851a3..a65f61de 100644 --- a/tests/templates/kuttl/ca-cert/15-secrets.yaml +++ b/tests/templates/kuttl/ca-cert/15-secrets.yaml @@ -13,7 +13,7 @@ data: apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -21,7 +21,14 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow --- apiVersion: v1 kind: Secret diff --git a/tests/templates/kuttl/ca-cert/25_airflow-wrong-cert.yaml.j2 b/tests/templates/kuttl/ca-cert/25_airflow-wrong-cert.yaml.j2 index 6e7244ec..71d47e93 100644 --- a/tests/templates/kuttl/ca-cert/25_airflow-wrong-cert.yaml.j2 +++ b/tests/templates/kuttl/ca-cert/25_airflow-wrong-cert.yaml.j2 @@ -26,7 +26,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials dagsGitSync: - repo: https://git-proxy.$NAMESPACE.svc.cluster.local/stackable-airflow/dags credentials: diff --git a/tests/templates/kuttl/ca-cert/30_airflow-cluster.yaml.j2 b/tests/templates/kuttl/ca-cert/30_airflow-cluster.yaml.j2 index 2e16038b..49a6a7f1 100644 --- a/tests/templates/kuttl/ca-cert/30_airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/ca-cert/30_airflow-cluster.yaml.j2 @@ -26,7 +26,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: 
vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials dagsGitSync: - repo: https://git-proxy.$NAMESPACE.svc.cluster.local/stackable-airflow/dags credentials: diff --git a/tests/templates/kuttl/cluster-operation/08-install-airflow.yaml.j2 b/tests/templates/kuttl/cluster-operation/08-install-airflow.yaml.j2 index 3aa50e6e..20d3fef4 100644 --- a/tests/templates/kuttl/cluster-operation/08-install-airflow.yaml.j2 +++ b/tests/templates/kuttl/cluster-operation/08-install-airflow.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,9 +15,22 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -36,7 +49,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials 
webservers: roleConfig: listenerClass: external-unstable @@ -47,6 +65,15 @@ spec: default: replicas: 1 celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/cluster-operation/10-pause-airflow.yaml.j2 b/tests/templates/kuttl/cluster-operation/10-pause-airflow.yaml.j2 index e84c9653..052c7fd7 100644 --- a/tests/templates/kuttl/cluster-operation/10-pause-airflow.yaml.j2 +++ b/tests/templates/kuttl/cluster-operation/10-pause-airflow.yaml.j2 @@ -12,39 +12,7 @@ spec: clusterOperation: reconciliationPaused: true stopped: false - image: -{% if test_scenario['values']['airflow-latest'].find(",") > 0 %} - custom: "{{ test_scenario['values']['airflow-latest'].split(',')[1] }}" - productVersion: "{{ test_scenario['values']['airflow-latest'].split(',')[0] }}" -{% else %} - productVersion: "{{ test_scenario['values']['airflow-latest'] }}" -{% endif %} - pullPolicy: IfNotPresent - clusterConfig: -{% if lookup('env', 'VECTOR_AGGREGATOR') %} - vectorAggregatorConfigMapName: vector-aggregator-discovery -{% endif %} - credentialsSecret: test-airflow-credentials - webservers: - roleConfig: - listenerClass: external-unstable - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 1 celeryExecutors: - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} roleGroups: default: replicas: 3 # ignored because paused - schedulers: - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 1 diff --git a/tests/templates/kuttl/cluster-operation/20-stop-airflow.yaml.j2 
b/tests/templates/kuttl/cluster-operation/20-stop-airflow.yaml.j2 index 470f8d7d..fea242b9 100644 --- a/tests/templates/kuttl/cluster-operation/20-stop-airflow.yaml.j2 +++ b/tests/templates/kuttl/cluster-operation/20-stop-airflow.yaml.j2 @@ -12,39 +12,3 @@ spec: clusterOperation: reconciliationPaused: false stopped: true - image: -{% if test_scenario['values']['airflow-latest'].find(",") > 0 %} - custom: "{{ test_scenario['values']['airflow-latest'].split(',')[1] }}" - productVersion: "{{ test_scenario['values']['airflow-latest'].split(',')[0] }}" -{% else %} - productVersion: "{{ test_scenario['values']['airflow-latest'] }}" -{% endif %} - pullPolicy: IfNotPresent - clusterConfig: -{% if lookup('env', 'VECTOR_AGGREGATOR') %} - vectorAggregatorConfigMapName: vector-aggregator-discovery -{% endif %} - credentialsSecret: test-airflow-credentials - webservers: - roleConfig: - listenerClass: external-unstable - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 1 - celeryExecutors: - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 3 # ignored because paused - schedulers: - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 1 diff --git a/tests/templates/kuttl/cluster-operation/30-restart-airflow.yaml.j2 b/tests/templates/kuttl/cluster-operation/30-restart-airflow.yaml.j2 index 7b90ad6b..7a57eed7 100644 --- a/tests/templates/kuttl/cluster-operation/30-restart-airflow.yaml.j2 +++ b/tests/templates/kuttl/cluster-operation/30-restart-airflow.yaml.j2 @@ -12,41 +12,6 @@ spec: clusterOperation: reconciliationPaused: false stopped: false - image: -{% if test_scenario['values']['airflow-latest'].find(",") > 0 %} - custom: "{{ test_scenario['values']['airflow-latest'].split(',')[1] }}" - productVersion: "{{ 
test_scenario['values']['airflow-latest'].split(',')[0] }}" -{% else %} - productVersion: "{{ test_scenario['values']['airflow-latest'] }}" -{% endif %} - pullPolicy: IfNotPresent clusterConfig: -{% if lookup('env', 'VECTOR_AGGREGATOR') %} - vectorAggregatorConfigMapName: vector-aggregator-discovery -{% endif %} - credentialsSecret: test-airflow-credentials databaseInitialization: enabled: false - webservers: - roleConfig: - listenerClass: external-unstable - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 1 - celeryExecutors: - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 3 - schedulers: - config: - logging: - enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} - roleGroups: - default: - replicas: 1 diff --git a/tests/templates/kuttl/cluster-operation/31-assert.yaml b/tests/templates/kuttl/cluster-operation/31-assert.yaml index 9c152c5f..4c33dbe9 100644 --- a/tests/templates/kuttl/cluster-operation/31-assert.yaml +++ b/tests/templates/kuttl/cluster-operation/31-assert.yaml @@ -5,4 +5,4 @@ kind: TestAssert timeout: 30 commands: - script: | - kubectl -n $NAMESPACE logs airflow-scheduler-default-0 | grep -q "Database migrating done!" && exit 1 || exit 0 + kubectl -n $NAMESPACE logs airflow-scheduler-default-0 | grep "Database migrating done!" 
&& exit 1 || exit 0 diff --git a/tests/templates/kuttl/external-access/install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/external-access/install-airflow-cluster.yaml.j2 index 84f6547a..f52378dc 100644 --- a/tests/templates/kuttl/external-access/install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/external-access/install-airflow-cluster.yaml.j2 @@ -2,7 +2,7 @@ apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -10,9 +10,22 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -29,7 +42,12 @@ spec: pullPolicy: IfNotPresent clusterConfig: loadExamples: false - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: test-external-stable-$NAMESPACE @@ -45,6 +63,15 @@ spec: replicas: 1 {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials 
roleGroups: default: replicas: 1 diff --git a/tests/templates/kuttl/ldap/60-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/ldap/60-install-airflow-cluster.yaml.j2 index cbba7152..d818b52d 100644 --- a/tests/templates/kuttl/ldap/60-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/ldap/60-install-airflow-cluster.yaml.j2 @@ -10,7 +10,7 @@ commands: apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -18,10 +18,23 @@ commands: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow + --- + apiVersion: v1 + kind: Secret + metadata: + name: airflow-postgresql-credentials + stringData: + username: airflow + password: airflow {% if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 + --- + apiVersion: v1 + kind: Secret + metadata: + name: airflow-redis-credentials + stringData: + username: "" + password: redis {% endif %} --- apiVersion: v1 @@ -52,7 +65,12 @@ commands: vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} loadExamples: true - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials authentication: - authenticationClass: {% if test_scenario['values']['ldap-authentication'] == 'no-tls' -%} no-tls-$NAMESPACE @@ -74,6 +92,15 @@ commands: replicas: 1 {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: 
airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/logging/41-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/logging/41-install-airflow-cluster.yaml.j2 index c60495da..9fddf14b 100644 --- a/tests/templates/kuttl/logging/41-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/logging/41-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,10 +15,23 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow {% if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis {% endif %} --- apiVersion: v1 @@ -77,7 +90,12 @@ spec: clusterConfig: vectorAggregatorConfigMapName: airflow-vector-aggregator-discovery loadExamples: true - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials dagsGitSync: - repo: https://github.com/stackabletech/example-dags gitFolder: dags @@ -144,6 +162,15 @@ spec: configMap: airflow-log-config {% if 
test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: resources: cpu: diff --git a/tests/templates/kuttl/mount-dags-configmap/30-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/mount-dags-configmap/30-install-airflow-cluster.yaml.j2 index b3bf36c8..b4fb90d3 100644 --- a/tests/templates/kuttl/mount-dags-configmap/30-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/mount-dags-configmap/30-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,10 +15,23 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow {% if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis {% endif %} --- apiVersion: v1 @@ -74,7 +87,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: 
airflow-postgresql-credentials volumes: - name: test-cm-dag configMap: @@ -96,6 +114,15 @@ spec: replicas: 1 {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/mount-dags-gitsync/30-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/mount-dags-gitsync/30-install-airflow-cluster.yaml.j2 index 1ff2110a..3ea3d9a3 100644 --- a/tests/templates/kuttl/mount-dags-gitsync/30-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/mount-dags-gitsync/30-install-airflow-cluster.yaml.j2 @@ -21,7 +21,7 @@ data: apiVersion: v1 kind: Secret metadata: - name: git-credentials + name: airflow-git-credentials type: Opaque data: # This is a fine-grained access token for the owner of the repo (stackable-airflow/dags) which has read only access @@ -34,7 +34,7 @@ data: apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -42,10 +42,23 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow {% if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" 
+ password: redis {% endif %} --- apiVersion: v1 @@ -73,7 +86,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials dagsGitSync: {% if test_scenario['values']['access'] == 'ssh' %} - repo: ssh://git@github.com/stackable-airflow/dags.git @@ -83,7 +101,7 @@ spec: {% if test_scenario['values']['access'] == 'https' %} - repo: https://github.com/stackable-airflow/dags credentials: - basicAuthSecretName: git-credentials + basicAuthSecretName: airflow-git-credentials {% endif %} {% if test_scenario['values']['executor'] == 'celery' %} # Just setting some values to increase the test coverage (defaults should work just fine) @@ -114,6 +132,15 @@ spec: replicas: 1 {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/oidc/install-airflow.yaml.j2 b/tests/templates/kuttl/oidc/install-airflow.yaml.j2 index 48826e4b..ea73899e 100644 --- a/tests/templates/kuttl/oidc/install-airflow.yaml.j2 +++ b/tests/templates/kuttl/oidc/install-airflow.yaml.j2 @@ -3,7 +3,7 @@ apiVersion: v1 kind: Secret metadata: - name: airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -11,7 +11,14 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: 
postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow --- apiVersion: v1 kind: Secret @@ -52,7 +59,12 @@ spec: oidc: clientCredentialsSecret: airflow-keycloak2-client userRegistrationRole: Admin - credentialsSecret: airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} diff --git a/tests/templates/kuttl/opa/30-install-airflow.yaml.j2 b/tests/templates/kuttl/opa/30-install-airflow.yaml.j2 index 9fe3daa0..2ac45360 100644 --- a/tests/templates/kuttl/opa/30-install-airflow.yaml.j2 +++ b/tests/templates/kuttl/opa/30-install-airflow.yaml.j2 @@ -7,7 +7,7 @@ metadata: apiVersion: v1 kind: Secret metadata: - name: airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,7 +15,22 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -38,7 +53,12 @@ spec: cache: entryTimeToLive: 5s maxEntries: 10 - credentialsSecret: airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials 
exposeConfig: true loadExamples: true {% if lookup('env', 'VECTOR_AGGREGATOR') %} diff --git a/tests/templates/kuttl/orphaned-resources/30-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/orphaned-resources/30-install-airflow-cluster.yaml.j2 index 3aa50e6e..20d3fef4 100644 --- a/tests/templates/kuttl/orphaned-resources/30-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/orphaned-resources/30-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,9 +15,22 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -36,7 +49,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable @@ -47,6 +65,15 @@ spec: default: replicas: 1 celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + 
redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/overrides/10-install-airflow.yaml.j2 b/tests/templates/kuttl/overrides/10-install-airflow.yaml.j2 index 7ae53a0c..aca2981a 100644 --- a/tests/templates/kuttl/overrides/10-install-airflow.yaml.j2 +++ b/tests/templates/kuttl/overrides/10-install-airflow.yaml.j2 @@ -2,7 +2,7 @@ apiVersion: v1 kind: Secret metadata: - name: airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -10,9 +10,22 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis --- apiVersion: v1 kind: Secret @@ -25,9 +38,6 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -45,14 +55,19 @@ spec: clusterConfig: loadExamples: true exposeConfig: false - credentialsSecret: airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + 
host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable envOverrides: COMMON_VAR: role-value # overridden by role group below ROLE_VAR: role-value # only defined here at role level - credentialsSecret: test-override + credentialsSecretName: test-override roleGroups: default: replicas: 1 @@ -60,6 +75,15 @@ spec: COMMON_VAR: group-value # overrides role value GROUP_VAR: group-value # only defined here at group level celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials envOverrides: COMMON_VAR: role-value # overridden by role group below ROLE_VAR: role-value # only defined here at role level @@ -101,4 +125,4 @@ spec: default: replicas: 1 envOverrides: - credentialsSecret: test-override + credentialsSecretName: test-override diff --git a/tests/templates/kuttl/overrides/11-assert.yaml b/tests/templates/kuttl/overrides/11-assert.yaml index 2ac0f892..6764bc9d 100644 --- a/tests/templates/kuttl/overrides/11-assert.yaml +++ b/tests/templates/kuttl/overrides/11-assert.yaml @@ -12,7 +12,7 @@ commands: kubectl -n $NAMESPACE get sts airflow-celery-webserver-default -o yaml | yq -e '.spec.template.spec.containers[] | select (.name == "airflow") | .env[] | select (.name == "COMMON_VAR" and .value == "group-value")' kubectl -n $NAMESPACE get sts airflow-celery-webserver-default -o yaml | yq -e '.spec.template.spec.containers[] | select (.name == "airflow") | .env[] | select (.name == "GROUP_VAR" and .value == "group-value")' kubectl -n $NAMESPACE get sts airflow-celery-webserver-default -o yaml | yq -e '.spec.template.spec.containers[] | select (.name == "airflow") | .env[] | select (.name == "ROLE_VAR" and .value == "role-value")' - kubectl -n $NAMESPACE get sts 
airflow-celery-webserver-default -o yaml | yq -e '.spec.template.spec.containers[] | select (.name == "airflow") | .env[] | select (.name == "credentialsSecret" and .value == "test-override")' + kubectl -n $NAMESPACE get sts airflow-celery-webserver-default -o yaml | yq -e '.spec.template.spec.containers[] | select (.name == "airflow") | .env[] | select (.name == "credentialsSecretName" and .value == "test-override")' - script: | diff --git a/tests/templates/kuttl/overrides/20-install-airflow2.yaml.j2 b/tests/templates/kuttl/overrides/20-install-airflow2.yaml.j2 index 96edce21..a0c89464 100644 --- a/tests/templates/kuttl/overrides/20-install-airflow2.yaml.j2 +++ b/tests/templates/kuttl/overrides/20-install-airflow2.yaml.j2 @@ -15,7 +15,12 @@ spec: clusterConfig: loadExamples: true exposeConfig: false - credentialsSecret: airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable diff --git a/tests/templates/kuttl/remote-logging/40-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/remote-logging/40-install-airflow-cluster.yaml.j2 index c0719d48..c61ddbe5 100644 --- a/tests/templates/kuttl/remote-logging/40-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/remote-logging/40-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,10 +15,23 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow {% 
if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis {% endif %} --- apiVersion: airflow.stackable.tech/v1alpha1 @@ -36,7 +49,12 @@ spec: pullPolicy: IfNotPresent clusterConfig: loadExamples: true - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable @@ -50,6 +68,15 @@ spec: AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID: minio_conn {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials roleGroups: default: replicas: 2 diff --git a/tests/templates/kuttl/resources/30-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/resources/30-install-airflow-cluster.yaml.j2 index 9f7782ca..bfc1fde8 100644 --- a/tests/templates/kuttl/resources/30-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/resources/30-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,9 +15,22 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow - connections.celeryResultBackend: 
db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -36,7 +49,12 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable @@ -47,6 +65,15 @@ spec: default: replicas: 1 celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/smoke/40-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/smoke/40-install-airflow-cluster.yaml.j2 index 7513506c..2b4852f7 100644 --- a/tests/templates/kuttl/smoke/40-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/smoke/40-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,10 +15,23 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: 
postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow {% if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis {% endif %} --- apiVersion: airflow.stackable.tech/v1alpha1 @@ -39,7 +52,12 @@ spec: vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} loadExamples: true - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials webservers: roleConfig: listenerClass: external-unstable @@ -62,6 +80,15 @@ spec: COMMON_HEADER_VAR = "group-value" {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials config: logging: enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }} diff --git a/tests/templates/kuttl/triggerer/30-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/triggerer/30-install-airflow-cluster.yaml.j2 index 2f728798..e82d79bc 100644 --- a/tests/templates/kuttl/triggerer/30-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/triggerer/30-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -15,10 
+15,23 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow {% if test_scenario['values']['executor'] == 'celery' %} - connections.celeryResultBackend: db+postgresql://airflow:airflow@airflow-postgresql/airflow - connections.celeryBrokerUrl: redis://:redis@airflow-redis-master:6379/0 +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-redis-credentials +stringData: + username: "" + password: redis {% endif %} --- apiVersion: v1 @@ -96,7 +109,12 @@ spec: {% endif %} pullPolicy: IfNotPresent clusterConfig: - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials volumes: - name: triggerer-dag configMap: @@ -115,6 +133,15 @@ spec: replicas: 1 {% if test_scenario['values']['executor'] == 'celery' %} celeryExecutors: + resultBackend: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials + broker: + redis: + host: airflow-redis-master + credentialsSecretName: airflow-redis-credentials roleGroups: default: envOverrides: *envOverrides diff --git a/tests/templates/kuttl/versioning/30-install-airflow-cluster.yaml.j2 b/tests/templates/kuttl/versioning/30-install-airflow-cluster.yaml.j2 index 21c05198..5ca2c1b9 100644 --- a/tests/templates/kuttl/versioning/30-install-airflow-cluster.yaml.j2 +++ b/tests/templates/kuttl/versioning/30-install-airflow-cluster.yaml.j2 @@ -7,7 +7,7 @@ timeout: 480 apiVersion: v1 kind: Secret metadata: - name: git-credentials + name: airflow-git-credentials type: Opaque data: # This is a fine-grained access token for 
the owner of the repo (stackable-airflow/dags) which has read only access @@ -19,7 +19,7 @@ data: apiVersion: v1 kind: Secret metadata: - name: test-airflow-credentials + name: airflow-admin-credentials type: Opaque stringData: adminUser.username: airflow @@ -27,7 +27,14 @@ stringData: adminUser.lastname: Admin adminUser.email: airflow@airflow.com adminUser.password: airflow - connections.sqlalchemyDatabaseUri: postgresql+psycopg2://airflow:airflow@airflow-postgresql/airflow +--- +apiVersion: v1 +kind: Secret +metadata: + name: airflow-postgresql-credentials +stringData: + username: airflow + password: airflow --- apiVersion: airflow.stackable.tech/v1alpha1 kind: AirflowCluster @@ -46,12 +53,17 @@ spec: {% if lookup('env', 'VECTOR_AGGREGATOR') %} vectorAggregatorConfigMapName: vector-aggregator-discovery {% endif %} - credentialsSecret: test-airflow-credentials + credentialsSecretName: airflow-admin-credentials + metadataDatabase: + postgresql: + host: airflow-postgresql + database: airflow + credentialsSecretName: airflow-postgresql-credentials dagsGitSync: - repo: https://github.com/stackable-airflow/dags # v1alpha1 field which should be converted to an enum # N.B. only works for the existing v1 field - credentialsSecret: git-credentials + credentialsSecret: airflow-git-credentials gitFolder: "mount-dags-gitsync/dags_airflow3" wait: 5s webservers: