diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index d1357fe3..acad6c09 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -15,6 +15,8 @@
- [ ] Helm chart can be installed and deployed operator works
- [ ] Integration tests passed (for non trivial changes)
- [ ] Changes need to be "offline" compatible
+- [ ] Links to generated (nightly) docs added
+- [ ] Release note snippet added
### Reviewer
@@ -29,4 +31,7 @@
- [ ] Feature Tracker has been updated
- [ ] Proper release label has been added
-- [ ] [Roadmap](https://github.com/orgs/stackabletech/projects/25/views/1) has been updated
+- [ ] Links to generated (nightly) docs added
+- [ ] Release note snippet added
+- [ ] Add `type/deprecation` label & add to the [deprecation schedule](https://github.com/orgs/stackabletech/projects/44/views/1)
+- [ ] Add `type/experimental` label & add to the [experimental features tracker](https://github.com/orgs/stackabletech/projects/47)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 9dfee91a..78add581 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -27,7 +27,7 @@ env:
CARGO_INCREMENTAL: '0'
CARGO_PROFILE_DEV_DEBUG: '0'
RUST_TOOLCHAIN_VERSION: "1.85.0"
- RUST_NIGHTLY_TOOLCHAIN_VERSION: "nightly-2025-01-15"
+ RUST_NIGHTLY_TOOLCHAIN_VERSION: "nightly-2025-05-26"
PYTHON_VERSION: "3.12"
RUSTFLAGS: "-D warnings"
RUSTDOCFLAGS: "-D warnings"
diff --git a/.github/workflows/pr_pre-commit.yaml b/.github/workflows/pr_pre-commit.yaml
index 4e5b9914..21f8d968 100644
--- a/.github/workflows/pr_pre-commit.yaml
+++ b/.github/workflows/pr_pre-commit.yaml
@@ -8,7 +8,7 @@ on:
env:
CARGO_TERM_COLOR: always
NIX_PKG_MANAGER_VERSION: "2.28.3"
- RUST_TOOLCHAIN_VERSION: "nightly-2025-01-15"
+ RUST_TOOLCHAIN_VERSION: "nightly-2025-05-26"
HADOLINT_VERSION: "v2.12.0"
PYTHON_VERSION: "3.12"
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 906defdd..2319cd56 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -82,7 +82,7 @@ repos:
- id: cargo-rustfmt
name: cargo-rustfmt
language: system
- entry: cargo +nightly-2025-01-15 fmt --all -- --check
+ entry: cargo +nightly-2025-05-26 fmt --all -- --check
stages: [pre-commit, pre-merge-commit]
pass_filenames: false
files: \.rs$
diff --git a/.readme/partials/borrowed/footer.md.j2 b/.readme/partials/borrowed/footer.md.j2
index a494f3b0..3d279e4d 100644
--- a/.readme/partials/borrowed/footer.md.j2
+++ b/.readme/partials/borrowed/footer.md.j2
@@ -62,3 +62,7 @@ This is enforced automatically when you submit a Pull Request where a bot will g
## Support
Get started with the community edition! If you want professional support, [we offer subscription plans and custom licensing](https://stackable.tech/en/plans/).
+
+## Sponsor
+
+If you want to support our work but don't need professional support please consider [sponsoring](https://github.com/sponsors/stackabletech) our work.
diff --git a/.readme/partials/borrowed/links.md.j2 b/.readme/partials/borrowed/links.md.j2
index 39e4e614..420b89ea 100644
--- a/.readme/partials/borrowed/links.md.j2
+++ b/.readme/partials/borrowed/links.md.j2
@@ -1,5 +1,4 @@
-{% if no_jenkins_job_badge %}{% else %}{% endif %}
[](https://GitHub.com/stackabletech/{{operator_name}}-operator/graphs/commit-activity)
[](https://docs.stackable.tech/home/stable/contributor/index.html)
[](./LICENSE)
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 75be60a6..0818fc99 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,7 +1,7 @@
{
"rust-analyzer.rustfmt.overrideCommand": [
"rustfmt",
- "+nightly-2025-01-15",
+ "+nightly-2025-05-26",
"--edition",
"2024",
"--"
diff --git a/README.md b/README.md
index 132ccc94..357918e0 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,6 @@
Stackable Operator for Apache Airflow
-
[](https://GitHub.com/stackabletech/airflow-operator/graphs/commit-activity)
[](https://docs.stackable.tech/home/stable/contributor/index.html)
[](./LICENSE)
@@ -103,4 +102,8 @@ This is enforced automatically when you submit a Pull Request where a bot will g
Get started with the community edition! If you want professional support, [we offer subscription plans and custom licensing](https://stackable.tech/en/plans/).
+## Sponsor
+
+If you want to support our work but don't need professional support please consider [sponsoring](https://github.com/sponsors/stackabletech) our work.
+
diff --git a/bors.toml b/bors.toml
deleted file mode 100644
index 420d30c8..00000000
--- a/bors.toml
+++ /dev/null
@@ -1,9 +0,0 @@
-status = [
- 'All tests passed'
-]
-delete_merged_branches = true
-use_squash_merge = true
-pr_status = [ 'license/cla' ]
-timeout_sec = 7200
-cut_body_after = ""
-required_approvals = 1
diff --git a/default.nix b/default.nix
index 263d8193..82f5928b 100644
--- a/default.nix
+++ b/default.nix
@@ -114,6 +114,10 @@ rec {
# (see https://github.com/pre-commit/pre-commit-hooks?tab=readme-ov-file#trailing-whitespace).
# So, remove the trailing newline already here to avoid that an
# unnecessary change is shown in Git.
- sed -i '$d' Cargo.nix
+ if [[ "$(uname)" == "Darwin" ]]; then
+ sed -i \"\" '$d' Cargo.nix
+ else
+ sed -i '$d' Cargo.nix
+ fi
'';
}
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 5c7033ed..b132592a 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,5 +1,7 @@
-# syntax=docker/dockerfile:1.10.0@sha256:865e5dd094beca432e8c0a1d5e1c465db5f998dca4e439981029b3b81fb39ed5
+# syntax=docker/dockerfile:1.15.1@sha256:9857836c9ee4268391bb5b09f9f157f3c91bb15821bb77969642813b0d00518d
# NOTE: The syntax directive needs to be the first line in a Dockerfile
+# Find the latest versions here: https://hub.docker.com/r/docker/dockerfile/tags
+# And the changelogs: https://docs.docker.com/build/buildkit/dockerfile-release-notes/ or https://github.com/moby/buildkit/releases
# =============
# This file is automatically generated from the templates in stackabletech/operator-templating
@@ -49,7 +51,7 @@ LABEL com.redhat.license_terms=""
LABEL io.buildah.version=""
LABEL io.openshift.expose-services=""
-# https://github.com/opencontainers/image-spec/blob/036563a4a268d7c08b51a08f05a02a0fe74c7268/annotations.md#annotations
+# https://github.com/opencontainers/image-spec/blob/64294bd7a2bf2537e1a6a34d687caae70300b0c4/annotations.md#annotations
LABEL org.opencontainers.image.authors="info@stackable.tech"
LABEL org.opencontainers.image.url="https://stackable.tech"
LABEL org.opencontainers.image.vendor="Stackable GmbH"
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
index fb21da0f..9ae738fb 100644
--- a/rust-toolchain.toml
+++ b/rust-toolchain.toml
@@ -1,3 +1,4 @@
# DO NOT EDIT, this file is generated by operator-templating
[toolchain]
channel = "1.85.0"
+profile = "default"
diff --git a/rust/operator-binary/src/airflow_controller.rs b/rust/operator-binary/src/airflow_controller.rs
index 569995cb..74e5dfb9 100644
--- a/rust/operator-binary/src/airflow_controller.rs
+++ b/rust/operator-binary/src/airflow_controller.rs
@@ -576,10 +576,10 @@ pub async fn reconcile_airflow(
.context(DeleteOrphanedResourcesSnafu)?;
let status = AirflowClusterStatus {
- conditions: compute_conditions(airflow, &[
- &ss_cond_builder,
- &cluster_operation_cond_builder,
- ]),
+ conditions: compute_conditions(
+ airflow,
+ &[&ss_cond_builder, &cluster_operation_cond_builder],
+ ),
};
client
diff --git a/rust/operator-binary/src/config.rs b/rust/operator-binary/src/config.rs
index 8bc5ca28..f130e5e9 100644
--- a/rust/operator-binary/src/config.rs
+++ b/rust/operator-binary/src/config.rs
@@ -476,7 +476,9 @@ mod tests {
("AUTH_TYPE".into(), "AUTH_OAUTH".into()),
("AUTH_USER_REGISTRATION".into(), "true".into()),
("AUTH_USER_REGISTRATION_ROLE".into(), "Admin".into()),
- ("OAUTH_PROVIDERS".into(), formatdoc! {"
+ (
+ "OAUTH_PROVIDERS".into(),
+ formatdoc! {"
[
{{ 'name': 'keycloak',
'icon': 'fa-key',
@@ -505,7 +507,8 @@ mod tests {
}},
}}
]
- "})
+ "}
+ )
]),
result
);
diff --git a/rust/operator-binary/src/crd/mod.rs b/rust/operator-binary/src/crd/mod.rs
index 4b74eb04..0a358a25 100644
--- a/rust/operator-binary/src/crd/mod.rs
+++ b/rust/operator-binary/src/crd/mod.rs
@@ -458,17 +458,20 @@ fn extract_role_from_webserver_config(
.role_groups
.into_iter()
.map(|(k, v)| {
- (k, RoleGroup {
- config: CommonConfiguration {
- config: v.config.config.airflow_config,
- config_overrides: v.config.config_overrides,
- env_overrides: v.config.env_overrides,
- cli_overrides: v.config.cli_overrides,
- pod_overrides: v.config.pod_overrides,
- product_specific_common_config: v.config.product_specific_common_config,
+ (
+ k,
+ RoleGroup {
+ config: CommonConfiguration {
+ config: v.config.config.airflow_config,
+ config_overrides: v.config.config_overrides,
+ env_overrides: v.config.env_overrides,
+ cli_overrides: v.config.cli_overrides,
+ pod_overrides: v.config.pod_overrides,
+ product_specific_common_config: v.config.product_specific_common_config,
+ },
+ replicas: v.replicas,
},
- replicas: v.replicas,
- })
+ )
})
.collect(),
}
diff --git a/rust/operator-binary/src/env_vars.rs b/rust/operator-binary/src/env_vars.rs
index bc3bbd69..3492df01 100644
--- a/rust/operator-binary/src/env_vars.rs
+++ b/rust/operator-binary/src/env_vars.rs
@@ -124,39 +124,54 @@ pub fn build_airflow_statefulset_envs(
}
let dags_folder = get_dags_folder(git_sync_resources);
- env.insert(AIRFLOW_CORE_DAGS_FOLDER.into(), EnvVar {
- name: AIRFLOW_CORE_DAGS_FOLDER.into(),
- value: Some(dags_folder),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_CORE_DAGS_FOLDER.into(),
+ EnvVar {
+ name: AIRFLOW_CORE_DAGS_FOLDER.into(),
+ value: Some(dags_folder),
+ ..Default::default()
+ },
+ );
if airflow.spec.cluster_config.load_examples {
- env.insert(AIRFLOW_CORE_LOAD_EXAMPLES.into(), EnvVar {
- name: AIRFLOW_CORE_LOAD_EXAMPLES.into(),
- value: Some("True".into()),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_CORE_LOAD_EXAMPLES.into(),
+ EnvVar {
+ name: AIRFLOW_CORE_LOAD_EXAMPLES.into(),
+ value: Some("True".into()),
+ ..Default::default()
+ },
+ );
} else {
- env.insert(AIRFLOW_CORE_LOAD_EXAMPLES.into(), EnvVar {
- name: AIRFLOW_CORE_LOAD_EXAMPLES.into(),
- value: Some("False".into()),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_CORE_LOAD_EXAMPLES.into(),
+ EnvVar {
+ name: AIRFLOW_CORE_LOAD_EXAMPLES.into(),
+ value: Some("False".into()),
+ ..Default::default()
+ },
+ );
}
if airflow.spec.cluster_config.expose_config {
- env.insert(AIRFLOW_WEBSERVER_EXPOSE_CONFIG.into(), EnvVar {
- name: AIRFLOW_WEBSERVER_EXPOSE_CONFIG.into(),
- value: Some("True".into()),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_WEBSERVER_EXPOSE_CONFIG.into(),
+ EnvVar {
+ name: AIRFLOW_WEBSERVER_EXPOSE_CONFIG.into(),
+ value: Some("True".into()),
+ ..Default::default()
+ },
+ );
}
- env.insert(AIRFLOW_CORE_EXECUTOR.into(), EnvVar {
- name: AIRFLOW_CORE_EXECUTOR.into(),
- value: Some(executor.to_string()),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_CORE_EXECUTOR.into(),
+ EnvVar {
+ name: AIRFLOW_CORE_EXECUTOR.into(),
+ value: Some(executor.to_string()),
+ ..Default::default()
+ },
+ );
if let AirflowExecutor::KubernetesExecutor { .. } = executor {
env.insert(
@@ -167,11 +182,14 @@ pub fn build_airflow_statefulset_envs(
..Default::default()
},
);
- env.insert(AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(), EnvVar {
- name: AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(),
- value: airflow.namespace(),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(),
+ EnvVar {
+ name: AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(),
+ value: airflow.namespace(),
+ ..Default::default()
+ },
+ );
}
match airflow_role {
@@ -210,20 +228,26 @@ pub fn build_airflow_statefulset_envs(
// apply overrides last of all with a fixed ordering
if let Some(env_vars) = env_vars {
        for (k, v) in env_vars.iter().collect::<BTreeMap<_, _>>() {
- env.insert(k.into(), EnvVar {
- name: k.to_string(),
- value: Some(v.to_string()),
- ..Default::default()
- });
+ env.insert(
+ k.into(),
+ EnvVar {
+ name: k.to_string(),
+ value: Some(v.to_string()),
+ ..Default::default()
+ },
+ );
}
}
// Needed for the `containerdebug` process to log it's tracing information to.
- env.insert("CONTAINERDEBUG_LOG_DIRECTORY".to_string(), EnvVar {
- name: "CONTAINERDEBUG_LOG_DIRECTORY".to_string(),
- value: Some(format!("{STACKABLE_LOG_DIR}/containerdebug")),
- value_from: None,
- });
+ env.insert(
+ "CONTAINERDEBUG_LOG_DIRECTORY".to_string(),
+ EnvVar {
+ name: "CONTAINERDEBUG_LOG_DIRECTORY".to_string(),
+ value: Some(format!("{STACKABLE_LOG_DIR}/containerdebug")),
+ value_from: None,
+ },
+ );
tracing::debug!("Env-var set [{:?}]", env);
Ok(transform_map_to_vec(env))
@@ -257,37 +281,52 @@ fn static_envs(
let dags_folder = get_dags_folder(git_sync_resources);
- env.insert(PYTHONPATH.into(), EnvVar {
- // PYTHONPATH must be extended to include the dags folder so that dag
- // dependencies can be found: this must be the actual path and not a variable.
- // Also include the airflow site-packages by default (for airflow and kubernetes classes etc.)
- name: PYTHONPATH.into(),
- value: Some(format!("{LOG_CONFIG_DIR}:{dags_folder}")),
- ..Default::default()
- });
- env.insert(AIRFLOW_LOGGING_LOGGING_CONFIG_CLASS.into(), EnvVar {
- name: AIRFLOW_LOGGING_LOGGING_CONFIG_CLASS.into(),
- value: Some("log_config.LOGGING_CONFIG".into()),
- ..Default::default()
- });
-
- env.insert(AIRFLOW_METRICS_STATSD_ON.into(), EnvVar {
- name: AIRFLOW_METRICS_STATSD_ON.into(),
- value: Some("True".into()),
- ..Default::default()
- });
-
- env.insert(AIRFLOW_METRICS_STATSD_HOST.into(), EnvVar {
- name: AIRFLOW_METRICS_STATSD_HOST.into(),
- value: Some("0.0.0.0".into()),
- ..Default::default()
- });
-
- env.insert(AIRFLOW_METRICS_STATSD_PORT.into(), EnvVar {
- name: AIRFLOW_METRICS_STATSD_PORT.into(),
- value: Some("9125".into()),
- ..Default::default()
- });
+ env.insert(
+ PYTHONPATH.into(),
+ EnvVar {
+ // PYTHONPATH must be extended to include the dags folder so that dag
+ // dependencies can be found: this must be the actual path and not a variable.
+ // Also include the airflow site-packages by default (for airflow and kubernetes classes etc.)
+ name: PYTHONPATH.into(),
+ value: Some(format!("{LOG_CONFIG_DIR}:{dags_folder}")),
+ ..Default::default()
+ },
+ );
+ env.insert(
+ AIRFLOW_LOGGING_LOGGING_CONFIG_CLASS.into(),
+ EnvVar {
+ name: AIRFLOW_LOGGING_LOGGING_CONFIG_CLASS.into(),
+ value: Some("log_config.LOGGING_CONFIG".into()),
+ ..Default::default()
+ },
+ );
+
+ env.insert(
+ AIRFLOW_METRICS_STATSD_ON.into(),
+ EnvVar {
+ name: AIRFLOW_METRICS_STATSD_ON.into(),
+ value: Some("True".into()),
+ ..Default::default()
+ },
+ );
+
+ env.insert(
+ AIRFLOW_METRICS_STATSD_HOST.into(),
+ EnvVar {
+ name: AIRFLOW_METRICS_STATSD_HOST.into(),
+ value: Some("0.0.0.0".into()),
+ ..Default::default()
+ },
+ );
+
+ env.insert(
+ AIRFLOW_METRICS_STATSD_PORT.into(),
+ EnvVar {
+ name: AIRFLOW_METRICS_STATSD_PORT.into(),
+ value: Some("9125".into()),
+ ..Default::default()
+ },
+ );
env.insert(
AIRFLOW_API_AUTH_BACKEND.into(),
@@ -324,26 +363,35 @@ pub fn build_airflow_template_envs(
),
);
- env.insert(AIRFLOW_CORE_EXECUTOR.into(), EnvVar {
- name: AIRFLOW_CORE_EXECUTOR.into(),
- value: Some("LocalExecutor".to_string()),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_CORE_EXECUTOR.into(),
+ EnvVar {
+ name: AIRFLOW_CORE_EXECUTOR.into(),
+ value: Some("LocalExecutor".to_string()),
+ ..Default::default()
+ },
+ );
- env.insert(AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(), EnvVar {
- name: AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(),
- value: airflow.namespace(),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(),
+ EnvVar {
+ name: AIRFLOW_KUBERNETES_EXECUTOR_NAMESPACE.into(),
+ value: airflow.namespace(),
+ ..Default::default()
+ },
+ );
// the config map also requires the dag-folder location as this will be passed on
// to the pods started by airflow.
let dags_folder = get_dags_folder(git_sync_resources);
- env.insert(AIRFLOW_CORE_DAGS_FOLDER.into(), EnvVar {
- name: AIRFLOW_CORE_DAGS_FOLDER.into(),
- value: Some(dags_folder),
- ..Default::default()
- });
+ env.insert(
+ AIRFLOW_CORE_DAGS_FOLDER.into(),
+ EnvVar {
+ name: AIRFLOW_CORE_DAGS_FOLDER.into(),
+ value: Some(dags_folder),
+ ..Default::default()
+ },
+ );
env.extend(static_envs(git_sync_resources));
@@ -351,27 +399,33 @@ pub fn build_airflow_template_envs(
// evaluated in the wrapper for each stackable spark container: this is necessary for pods
// that are created and then terminated (we do a similar thing for spark-k8s).
if config.logging.enable_vector_agent {
- env.insert("_STACKABLE_POST_HOOK".into(), EnvVar {
- name: "_STACKABLE_POST_HOOK".into(),
- value: Some(
- [
- // Wait for Vector to gather the logs.
- "sleep 10",
- &create_vector_shutdown_file_command(STACKABLE_LOG_DIR),
- ]
- .join("; "),
- ),
- ..Default::default()
- });
+ env.insert(
+ "_STACKABLE_POST_HOOK".into(),
+ EnvVar {
+ name: "_STACKABLE_POST_HOOK".into(),
+ value: Some(
+ [
+ // Wait for Vector to gather the logs.
+ "sleep 10",
+ &create_vector_shutdown_file_command(STACKABLE_LOG_DIR),
+ ]
+ .join("; "),
+ ),
+ ..Default::default()
+ },
+ );
}
// iterate over a BTreeMap to ensure the vars are written in a predictable order
    for (k, v) in env_overrides.iter().collect::<BTreeMap<_, _>>() {
- env.insert(k.to_string(), EnvVar {
- name: k.to_string(),
- value: Some(v.to_string()),
- ..Default::default()
- });
+ env.insert(
+ k.to_string(),
+ EnvVar {
+ name: k.to_string(),
+ value: Some(v.to_string()),
+ ..Default::default()
+ },
+ );
}
tracing::debug!("Env-var set [{:?}]", env);
diff --git a/rust/operator-binary/src/main.rs b/rust/operator-binary/src/main.rs
index d3410673..e448f891 100644
--- a/rust/operator-binary/src/main.rs
+++ b/rust/operator-binary/src/main.rs
@@ -92,10 +92,13 @@ async fn main() -> anyhow::Result<()> {
)
.await?;
- let event_recorder = Arc::new(Recorder::new(client.as_kube_client(), Reporter {
- controller: AIRFLOW_FULL_CONTROLLER_NAME.to_string(),
- instance: None,
- }));
+ let event_recorder = Arc::new(Recorder::new(
+ client.as_kube_client(),
+ Reporter {
+ controller: AIRFLOW_FULL_CONTROLLER_NAME.to_string(),
+ instance: None,
+ },
+ ));
let airflow_controller = Controller::new(
        watch_namespace.get_api::<DeserializeGuard<v1alpha1::AirflowCluster>>(&client),