diff --git a/Cargo.lock b/Cargo.lock
index 7e90b125..9ad7098e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1372,7 +1372,7 @@ dependencies = [
[[package]]
name = "k8s-version"
version = "0.1.3"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"darling 0.23.0",
"regex",
@@ -2511,7 +2511,7 @@ dependencies = [
[[package]]
name = "stackable-operator"
version = "0.105.0"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"chrono",
"clap",
@@ -2549,7 +2549,7 @@ dependencies = [
[[package]]
name = "stackable-operator-derive"
version = "0.3.1"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"darling 0.23.0",
"proc-macro2",
@@ -2560,7 +2560,7 @@ dependencies = [
[[package]]
name = "stackable-shared"
version = "0.0.3"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"chrono",
"k8s-openapi",
@@ -2577,7 +2577,7 @@ dependencies = [
[[package]]
name = "stackable-telemetry"
version = "0.6.1"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"axum",
"clap",
@@ -2601,7 +2601,7 @@ dependencies = [
[[package]]
name = "stackable-versioned"
version = "0.8.3"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"schemars",
"serde",
@@ -2614,7 +2614,7 @@ dependencies = [
[[package]]
name = "stackable-versioned-macros"
version = "0.8.3"
-source = "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa"
+source = "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#69fbb2a0bc5329a8a3bf1904944474d3f3e13f64"
dependencies = [
"convert_case",
"darling 0.23.0",
diff --git a/Cargo.nix b/Cargo.nix
index acc14aeb..422b0aea 100644
--- a/Cargo.nix
+++ b/Cargo.nix
@@ -4274,9 +4274,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
libName = "k8s_version";
authors = [
@@ -8201,9 +8201,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
libName = "stackable_operator";
authors = [
@@ -8370,9 +8370,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
procMacro = true;
libName = "stackable_operator_derive";
@@ -8405,9 +8405,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
libName = "stackable_shared";
authors = [
@@ -8487,9 +8487,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
libName = "stackable_telemetry";
authors = [
@@ -8597,9 +8597,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
libName = "stackable_versioned";
authors = [
@@ -8641,9 +8641,9 @@ rec {
edition = "2024";
workspace_member = null;
src = pkgs.fetchgit {
- url = "https://github.com/stackabletech/operator-rs.git";
- rev = "7bfcac5f6515c8b4c8cf8def2edfde5ed8621aaa";
- sha256 = "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd";
+ url = "https://github.com/stackabletech//operator-rs.git";
+ rev = "69fbb2a0bc5329a8a3bf1904944474d3f3e13f64";
+ sha256 = "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry";
};
procMacro = true;
libName = "stackable_versioned_macros";
diff --git a/Cargo.toml b/Cargo.toml
index ad772177..9dc5502a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -33,9 +33,6 @@ strum = { version = "0.27", features = ["derive"] }
tokio = { version = "1.40", features = ["full"] }
tracing = "0.1"
-# [patch."https://github.com/stackabletech/operator-rs.git"]
+[patch."https://github.com/stackabletech/operator-rs.git"]
# stackable-operator = { path = "../operator-rs/crates/stackable-operator" }
-# stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "main" }
-
-[patch.crates-io]
-kube = { git = "https://github.com/stackabletech/kube-rs", branch = "2.0.1-fix-schema-hoisting" }
+stackable-operator = { git = "https://github.com/stackabletech//operator-rs.git", branch = "spike/generic-databases" }
diff --git a/crate-hashes.json b/crate-hashes.json
index c7bba396..07f40d03 100644
--- a/crate-hashes.json
+++ b/crate-hashes.json
@@ -1,15 +1,15 @@
{
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#k8s-version@0.1.3": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#stackable-operator-derive@0.3.1": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#stackable-operator@0.105.0": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#stackable-shared@0.0.3": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#stackable-telemetry@0.6.1": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#stackable-versioned-macros@0.8.3": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
+ "git+https://github.com/stackabletech//operator-rs.git?branch=spike%2Fgeneric-databases#stackable-versioned@0.8.3": "0m3d02fsc7z2mvr0rvlv0n30wi8jcv2m00fd82s23jnk3jpmkfry",
"git+https://github.com/stackabletech/kube-rs?branch=2.0.1-fix-schema-hoisting#kube-client@2.0.1": "1a7bcl0w1jg71jc4iml0vjp8dpzy71mhxl012grxcy2xp5i6xvgf",
"git+https://github.com/stackabletech/kube-rs?branch=2.0.1-fix-schema-hoisting#kube-core@2.0.1": "1a7bcl0w1jg71jc4iml0vjp8dpzy71mhxl012grxcy2xp5i6xvgf",
"git+https://github.com/stackabletech/kube-rs?branch=2.0.1-fix-schema-hoisting#kube-derive@2.0.1": "1a7bcl0w1jg71jc4iml0vjp8dpzy71mhxl012grxcy2xp5i6xvgf",
"git+https://github.com/stackabletech/kube-rs?branch=2.0.1-fix-schema-hoisting#kube-runtime@2.0.1": "1a7bcl0w1jg71jc4iml0vjp8dpzy71mhxl012grxcy2xp5i6xvgf",
"git+https://github.com/stackabletech/kube-rs?branch=2.0.1-fix-schema-hoisting#kube@2.0.1": "1a7bcl0w1jg71jc4iml0vjp8dpzy71mhxl012grxcy2xp5i6xvgf",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#k8s-version@0.1.3": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#stackable-operator-derive@0.3.1": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#stackable-operator@0.105.0": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#stackable-shared@0.0.3": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#stackable-telemetry@0.6.1": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#stackable-versioned-macros@0.8.3": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
- "git+https://github.com/stackabletech/operator-rs.git?tag=stackable-operator-0.105.0#stackable-versioned@0.8.3": "02z7c2kjhcwg153j74n52wwcr5x0z70hc21hlsrxyclmc8ps1lmd",
"git+https://github.com/stackabletech/product-config.git?tag=0.8.0#product-config@0.8.0": "1dz70kapm2wdqcr7ndyjji0lhsl98bsq95gnb2lw487wf6yr7987"
}
\ No newline at end of file
diff --git a/deploy/config-spec/properties.yaml b/deploy/config-spec/properties.yaml
index 78b63887..4ba37d15 100644
--- a/deploy/config-spec/properties.yaml
+++ b/deploy/config-spec/properties.yaml
@@ -58,62 +58,6 @@ properties:
comment: "TTL for domain names that cannot be resolved."
description: "TTL for domain names that cannot be resolved."
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionURL"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- # unit: *unitUri
- roles:
- - name: "metastore"
- required: true
- asOfVersion: "0.0.0"
- description: "JDBC connect string for a JDBC metastore.
- To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
- For example, jdbc:postgresql://myhost/db?ssl=true for postgres database."
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionDriverName"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- unit: *unitClassName
- roles:
- - name: "metastore"
- # This property *is* required even though it is set to `false` here.
- # The required value changed in Derby in version 4.2.0.
- # We now set this value depending on the Hive version (and its corresponding Derby version) and add it to the ConfigMap programatically.
- required: false
- asOfVersion: "0.0.0"
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionUserName"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- roles:
- - name: "metastore"
- required: true
- asOfVersion: "0.0.0"
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionPassword"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- roles:
- - name: "metastore"
- required: true
- asOfVersion: "0.0.0"
- property:
propertyNames:
- name: "hive.metastore.port"
diff --git a/deploy/helm/hive-operator/configs/properties.yaml b/deploy/helm/hive-operator/configs/properties.yaml
index 78b63887..4ba37d15 100644
--- a/deploy/helm/hive-operator/configs/properties.yaml
+++ b/deploy/helm/hive-operator/configs/properties.yaml
@@ -58,62 +58,6 @@ properties:
comment: "TTL for domain names that cannot be resolved."
description: "TTL for domain names that cannot be resolved."
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionURL"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- # unit: *unitUri
- roles:
- - name: "metastore"
- required: true
- asOfVersion: "0.0.0"
- description: "JDBC connect string for a JDBC metastore.
- To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
- For example, jdbc:postgresql://myhost/db?ssl=true for postgres database."
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionDriverName"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- unit: *unitClassName
- roles:
- - name: "metastore"
- # This property *is* required even though it is set to `false` here.
- # The required value changed in Derby in version 4.2.0.
- # We now set this value depending on the Hive version (and its corresponding Derby version) and add it to the ConfigMap programatically.
- required: false
- asOfVersion: "0.0.0"
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionUserName"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- roles:
- - name: "metastore"
- required: true
- asOfVersion: "0.0.0"
- - property:
- propertyNames:
- - name: "javax.jdo.option.ConnectionPassword"
- kind:
- type: "file"
- file: "hive-site.xml"
- datatype:
- type: "string"
- roles:
- - name: "metastore"
- required: true
- asOfVersion: "0.0.0"
- property:
propertyNames:
- name: "hive.metastore.port"
diff --git a/deploy/helm/hive-operator/crds/crds.yaml b/deploy/helm/hive-operator/crds/crds.yaml
index ece12c91..0800fff9 100644
--- a/deploy/helm/hive-operator/crds/crds.yaml
+++ b/deploy/helm/hive-operator/crds/crds.yaml
@@ -76,36 +76,6 @@ spec:
- configMapName
type: object
type: object
- database:
- description: Database connection specification for the metadata database.
- properties:
- connString:
- description: |-
- A connection string for the database. For example:
- `jdbc:postgresql://hivehdfs-postgresql:5432/hivehdfs`
- type: string
- credentialsSecret:
- description: |-
- A reference to a Secret containing the database credentials.
- The Secret needs to contain the keys `username` and `password`.
- type: string
- dbType:
- description: |-
- The type of database to connect to. Supported are:
- `postgres`, `mysql`, `oracle`, `mssql` and `derby`.
- This value is used to configure the jdbc driver class.
- enum:
- - derby
- - mysql
- - postgres
- - oracle
- - mssql
- type: string
- required:
- - connString
- - credentialsSecret
- - dbType
- type: object
hdfs:
description: HDFS connection specification.
nullable: true
@@ -120,6 +90,90 @@ spec:
required:
- configMap
type: object
+ metadataDatabase:
+ description: TODO docs
+ oneOf:
+ - required:
+ - postgresql
+ - required:
+ - mysql
+ - required:
+ - derby
+ properties:
+ derby:
+ description: TODO docs
+ properties:
+ location:
+ description: TODO docs, especially on default
+ nullable: true
+ type: string
+ type: object
+ mysql:
+ description: |-
+ TODO docs
+
+ Please note that - due to license issues - we don't ship the mysql driver, you need to add
+                it yourself.
+ See for details.
+ properties:
+ credentialsSecret:
+ description: TODO docs
+ type: string
+ database:
+ description: TODO docs
+ type: string
+ host:
+ description: TODO docs
+ type: string
+ parameters:
+ additionalProperties:
+ type: string
+ default: {}
+ description: TODO docs
+ type: object
+ port:
+ default: 3306
+ description: TODO docs
+ format: uint16
+ maximum: 65535.0
+ minimum: 0.0
+ type: integer
+ required:
+ - credentialsSecret
+ - database
+ - host
+ type: object
+ postgresql:
+ description: TODO docs
+ properties:
+ credentialsSecret:
+ description: TODO docs
+ type: string
+ database:
+ description: TODO docs
+ type: string
+ host:
+ description: TODO docs
+ type: string
+ parameters:
+ additionalProperties:
+ type: string
+ default: {}
+ description: TODO docs
+ type: object
+ port:
+ default: 5432
+ description: TODO docs
+ format: uint16
+ maximum: 65535.0
+ minimum: 0.0
+ type: integer
+ required:
+ - credentialsSecret
+ - database
+ - host
+ type: object
+ type: object
s3:
description: |-
S3 connection specification. This can be either `inline` or a `reference` to an
@@ -280,7 +334,7 @@ spec:
nullable: true
type: string
required:
- - database
+ - metadataDatabase
type: object
clusterOperation:
default:
diff --git a/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml b/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml
index 48ad049c..d71c4fe8 100644
--- a/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml
+++ b/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml
@@ -7,10 +7,11 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
s3:
reference: minio
metastore:
diff --git a/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml.j2 b/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml.j2
index 48ad049c..d71c4fe8 100644
--- a/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml.j2
+++ b/docs/modules/hive/examples/getting_started/hive-postgres-s3.yaml.j2
@@ -7,10 +7,11 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
s3:
reference: minio
metastore:
diff --git a/docs/modules/hive/pages/reference/discovery.adoc b/docs/modules/hive/pages/reference/discovery.adoc
index bed75059..213e07d1 100644
--- a/docs/modules/hive/pages/reference/discovery.adoc
+++ b/docs/modules/hive/pages/reference/discovery.adoc
@@ -23,10 +23,11 @@ metadata:
namespace: {namespace} # <2>
spec:
clusterConfig:
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
metastore:
roleGroups:
default: # <3>
diff --git a/docs/modules/hive/pages/usage-guide/database-driver.adoc b/docs/modules/hive/pages/usage-guide/database-driver.adoc
index 18a26cc8..1dbc22ea 100644
--- a/docs/modules/hive/pages/usage-guide/database-driver.adoc
+++ b/docs/modules/hive/pages/usage-guide/database-driver.adoc
@@ -149,10 +149,11 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:mysql://mysql:3306/hive # <1>
- credentialsSecret: hive-credentials # <2>
- dbType: mysql
+ metadataDatabase:
+ mysql:
+ host: mysql
+ database: hive
+ credentialsSecret: hive-credentials
s3:
reference: minio # <3>
metastore:
@@ -224,7 +225,7 @@ spec:
custom: oci.stackable.tech/sdp/hive:4.2.0-stackable0.0.0-dev-mysql # <1>
productVersion: 4.2.0
clusterConfig:
- database:
+ metadataDatabase:
...
s3:
...
diff --git a/docs/modules/hive/pages/usage-guide/derby-example.adoc b/docs/modules/hive/pages/usage-guide/derby-example.adoc
index 83285c8b..a3ac2306 100644
--- a/docs/modules/hive/pages/usage-guide/derby-example.adoc
+++ b/docs/modules/hive/pages/usage-guide/derby-example.adoc
@@ -18,23 +18,12 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/metastore_db;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
metastore:
roleGroups:
default:
replicas: 1
----
-apiVersion: v1
-kind: Secret
-metadata:
- name: hive-credentials
-type: Opaque
-stringData:
- username: APP
- password: mine
----
WARNING: You should not use the `Derby` database in production. Derby stores data locally which does not work in high availability setups (multiple replicas) and all data is lost after Pod restarts.
@@ -72,10 +61,8 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/stackable/metastore_db;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
s3:
inline:
host: minio
@@ -107,15 +94,6 @@ metadata:
stringData:
accessKey: minio-access-key
secretKey: minio-secret-key
----
-apiVersion: v1
-kind: Secret
-metadata:
- name: hive-credentials
-type: Opaque
-stringData:
- username: APP
- password: mine
----
@@ -153,10 +131,11 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:postgresql://hive-postgresql.default.svc.cluster.local:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: hive-postgresql.default.svc.cluster.local
+ database: hive
+ credentialsSecret: hive-credentials
metastore:
roleGroups:
default:
diff --git a/examples/hive-opa-cluster.yaml b/examples/hive-opa-cluster.yaml
index 5ec312f5..2d17fc2b 100644
--- a/examples/hive-opa-cluster.yaml
+++ b/examples/hive-opa-cluster.yaml
@@ -30,10 +30,11 @@ spec:
opa:
configMapName: opa
package: hms
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-postgresql-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-postgresql-credentials
metastore:
roleGroups:
default:
diff --git a/examples/simple-hive-cluster-postgres-s3.yaml b/examples/simple-hive-cluster-postgres-s3.yaml
index f14843af..d126034e 100644
--- a/examples/simple-hive-cluster-postgres-s3.yaml
+++ b/examples/simple-hive-cluster-postgres-s3.yaml
@@ -30,10 +30,11 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:postgresql://hive-postgresql.default.svc.cluster.local:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: hive-postgresql.default.svc.cluster.local
+ database: hive
+ credentialsSecret: hive-credentials
s3:
inline:
host: test-minio
diff --git a/examples/simple-hive-cluster.yaml b/examples/simple-hive-cluster.yaml
index e239dd61..ae269512 100644
--- a/examples/simple-hive-cluster.yaml
+++ b/examples/simple-hive-cluster.yaml
@@ -11,10 +11,8 @@ spec:
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
metastore:
roleGroups:
default:
diff --git a/rust/operator-binary/src/command.rs b/rust/operator-binary/src/command.rs
index ee08f56d..0ce91957 100644
--- a/rust/operator-binary/src/command.rs
+++ b/rust/operator-binary/src/command.rs
@@ -3,10 +3,9 @@ use stackable_operator::crd::s3;
use crate::{
config::opa::HiveOpaConfig,
crd::{
- DB_PASSWORD_ENV, DB_PASSWORD_PLACEHOLDER, DB_USERNAME_ENV, DB_USERNAME_PLACEHOLDER,
- HIVE_METASTORE_LOG4J2_PROPERTIES, HIVE_SITE_XML, STACKABLE_CONFIG_DIR,
- STACKABLE_CONFIG_MOUNT_DIR, STACKABLE_LOG_CONFIG_MOUNT_DIR, STACKABLE_TRUST_STORE,
- STACKABLE_TRUST_STORE_PASSWORD, v1alpha1,
+ HIVE_METASTORE_LOG4J2_PROPERTIES, STACKABLE_CONFIG_DIR, STACKABLE_CONFIG_MOUNT_DIR,
+ STACKABLE_LOG_CONFIG_MOUNT_DIR, STACKABLE_TRUST_STORE, STACKABLE_TRUST_STORE_PASSWORD,
+ v1alpha1,
},
};
@@ -63,13 +62,6 @@ pub fn build_container_command_args(
}
}
- // db credentials
- args.extend([
- format!("echo replacing {DB_USERNAME_PLACEHOLDER} and {DB_PASSWORD_PLACEHOLDER} with secret values."),
- format!("sed -i \"s|{DB_USERNAME_PLACEHOLDER}|${DB_USERNAME_ENV}|g\" {STACKABLE_CONFIG_DIR}/{HIVE_SITE_XML}"),
- format!("sed -i \"s|{DB_PASSWORD_PLACEHOLDER}|${DB_PASSWORD_ENV}|g\" {STACKABLE_CONFIG_DIR}/{HIVE_SITE_XML}"),
- ]);
-
// metastore start command
args.push(start_command);
diff --git a/rust/operator-binary/src/config/jvm.rs b/rust/operator-binary/src/config/jvm.rs
index 0acfaca6..9f7bc9f2 100644
--- a/rust/operator-binary/src/config/jvm.rs
+++ b/rust/operator-binary/src/config/jvm.rs
@@ -109,10 +109,8 @@ mod tests {
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- dbType: derby
- credentialsSecret: mySecret
+ metadataDatabase:
+ derby: {}
metastore:
roleGroups:
default:
@@ -144,10 +142,8 @@ mod tests {
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- dbType: derby
- credentialsSecret: mySecret
+ metadataDatabase:
+ derby: {}
metastore:
config:
resources:
@@ -196,7 +192,10 @@ mod tests {
Role,
String,
) {
- let hive: HiveCluster = serde_yaml::from_str(hive_cluster).expect("illegal test input");
+ let deserializer = serde_yaml::Deserializer::from_str(hive_cluster);
+ let hive: HiveCluster =
+ serde_yaml::with::singleton_map_recursive::deserialize(deserializer)
+ .expect("invalid test input");
let hive_role = HiveRole::MetaStore;
let rolegroup_ref = hive.metastore_rolegroup_ref("default");
diff --git a/rust/operator-binary/src/controller.rs b/rust/operator-binary/src/controller.rs
index 036f2798..578e2603 100644
--- a/rust/operator-binary/src/controller.rs
+++ b/rust/operator-binary/src/controller.rs
@@ -38,14 +38,16 @@ use stackable_operator::{
rbac::build_rbac_resources,
},
constants::RESTART_CONTROLLER_ENABLED_LABEL,
- crd::{listener::v1alpha1::Listener, s3},
+ crd::{
+ database::drivers::jdbc::JDBCDatabaseConnectionDetails, listener::v1alpha1::Listener, s3,
+ },
k8s_openapi::{
DeepMerge,
api::{
apps::v1::{StatefulSet, StatefulSetSpec},
core::v1::{
- ConfigMap, ConfigMapVolumeSource, EmptyDirVolumeSource, Probe, TCPSocketAction,
- Volume,
+ ConfigMap, ConfigMapVolumeSource, EmptyDirVolumeSource, EnvVar, Probe,
+ TCPSocketAction, Volume,
},
},
apimachinery::pkg::{
@@ -90,9 +92,9 @@ use crate::{
opa::{HiveOpaConfig, OPA_TLS_VOLUME_NAME},
},
crd::{
- APP_NAME, CORE_SITE_XML, Container, DB_PASSWORD_ENV, DB_USERNAME_ENV, HIVE_PORT,
- HIVE_PORT_NAME, HIVE_SITE_XML, HiveClusterStatus, HiveRole, JVM_SECURITY_PROPERTIES_FILE,
- METRICS_PORT, METRICS_PORT_NAME, MetaStoreConfig, STACKABLE_CONFIG_DIR,
+ APP_NAME, CORE_SITE_XML, Container, HIVE_PORT, HIVE_PORT_NAME, HIVE_SITE_XML,
+ HiveClusterStatus, HiveRole, JVM_SECURITY_PROPERTIES_FILE, METRICS_PORT, METRICS_PORT_NAME,
+ MetaStoreConfig, MetadataDatabaseConnection, STACKABLE_CONFIG_DIR,
STACKABLE_CONFIG_DIR_NAME, STACKABLE_CONFIG_MOUNT_DIR, STACKABLE_CONFIG_MOUNT_DIR_NAME,
STACKABLE_LOG_CONFIG_MOUNT_DIR, STACKABLE_LOG_CONFIG_MOUNT_DIR_NAME, STACKABLE_LOG_DIR,
STACKABLE_LOG_DIR_NAME,
@@ -329,6 +331,11 @@ pub enum Error {
TlsCertSecretClassVolumeBuild {
source: stackable_operator::builder::pod::volume::SecretOperatorVolumeSourceBuilderError,
},
+
+ #[snafu(display("invalid metadata database connection"))]
+ InvalidMetadataDatabaseConnection {
+ source: stackable_operator::crd::database::Error,
+ },
}
type Result = std::result::Result;
@@ -374,6 +381,14 @@ pub async fn reconcile_hive(
None
};
+ let metadata_database_connection_details = hive
+ .spec
+ .cluster_config
+ .metadata_database
+ .as_jdbc_database_connection()
+ .jdbc_connection_details("METADATA")
+ .context(InvalidMetadataDatabaseConnectionSnafu)?;
+
let validated_config = validate_all_roles_and_groups_config(
&resolved_product_image.product_version,
&transform_all_roles_to_config(
@@ -465,6 +480,7 @@ pub async fn reconcile_hive(
&resolved_product_image,
&rolegroup,
rolegroup_config,
+ &metadata_database_connection_details,
s3_connection_spec.as_ref(),
&config,
&client.kubernetes_cluster_info,
@@ -476,6 +492,7 @@ pub async fn reconcile_hive(
&resolved_product_image,
&rolegroup,
rolegroup_config,
+ &metadata_database_connection_details,
s3_connection_spec.as_ref(),
&config,
&rbac_sa.name_any(),
@@ -595,6 +612,7 @@ fn build_metastore_rolegroup_config_map(
resolved_product_image: &ResolvedProductImage,
rolegroup: &RoleGroupRef,
role_group_config: &HashMap>,
+ database_connection_details: &JDBCDatabaseConnectionDetails,
s3_connection_spec: Option<&s3::v1alpha1::ConnectionSpec>,
merged_config: &MetaStoreConfig,
cluster_info: &KubernetesClusterInfo,
@@ -612,14 +630,48 @@ fn build_metastore_rolegroup_config_map(
Some("/stackable/warehouse".to_string()),
);
+ // The Derby driver class needs some special handling
+ let driver = match &hive.spec.cluster_config.metadata_database {
+ MetadataDatabaseConnection::Derby(_) => {
+ // The driver class changed for hive 4.2.0
+ if ["3.1.3", "4.0.0", "4.0.1", "4.1.0"]
+ .contains(&resolved_product_image.product_version.as_str())
+ {
+ "org.apache.derby.jdbc.EmbeddedDriver"
+ } else {
+ "org.apache.derby.iapi.jdbc.AutoloadedDriver"
+ }
+ }
+ _ => database_connection_details.driver.as_str(),
+ };
data.insert(
MetaStoreConfig::CONNECTION_DRIVER_NAME.to_string(),
- Some(
- hive.db_type()
- .get_jdbc_driver_class(&resolved_product_image.product_version)
- .to_string(),
- ),
+ Some(driver.to_owned()),
+ );
+ data.insert(
+ MetaStoreConfig::CONNECTION_URL.to_string(),
+ Some(database_connection_details.connection_uri.to_string()),
);
+ if let Some(EnvVar {
+ name: username_env_name,
+ ..
+ }) = &database_connection_details.username_env
+ {
+ data.insert(
+ MetaStoreConfig::CONNECTION_USER_NAME.to_string(),
+ Some(format!("${{env:{username_env_name}}}",)),
+ );
+ }
+ if let Some(EnvVar {
+ name: password_env_name,
+ ..
+ }) = &database_connection_details.password_env
+ {
+ data.insert(
+ MetaStoreConfig::CONNECTION_PASSWORD.to_string(),
+ Some(format!("${{env:{password_env_name}}}",)),
+ );
+ }
if let Some(s3) = s3_connection_spec {
data.insert(
@@ -759,6 +811,7 @@ fn build_metastore_rolegroup_statefulset(
resolved_product_image: &ResolvedProductImage,
rolegroup_ref: &RoleGroupRef,
metastore_config: &HashMap>,
+ database_connection_details: &JDBCDatabaseConnectionDetails,
s3_connection: Option<&s3::v1alpha1::ConnectionSpec>,
merged_config: &MetaStoreConfig,
sa_name: &str,
@@ -774,13 +827,7 @@ fn build_metastore_rolegroup_statefulset(
name: APP_NAME.to_string(),
})?;
- let credentials_secret_name = hive.spec.cluster_config.database.credentials_secret.clone();
-
container_builder
- // load database credentials to environment variables: these will be used to replace
- // the placeholders in hive-site.xml so that the operator does not "touch" the secret.
- .add_env_var_from_secret(DB_USERNAME_ENV, &credentials_secret_name, "username")
- .add_env_var_from_secret(DB_PASSWORD_ENV, &credentials_secret_name, "password")
.add_env_var(
"HADOOP_HEAPSIZE",
construct_hadoop_heapsize_env(merged_config).context(ConstructJvmArgumentsSnafu)?,
@@ -794,6 +841,7 @@ fn build_metastore_rolegroup_statefulset(
"CONTAINERDEBUG_LOG_DIRECTORY",
format!("{STACKABLE_LOG_DIR}/containerdebug"),
);
+ database_connection_details.add_to_container(&mut container_builder);
for (property_name_kind, config) in metastore_config {
if property_name_kind == &PropertyNameKind::Env {
@@ -862,7 +910,7 @@ fn build_metastore_rolegroup_statefulset(
.context(AddVolumeSnafu)?;
}
- let db_type = hive.db_type();
+ let db_type = hive.spec.cluster_config.metadata_database.as_hive_db_type();
let start_command = if resolved_product_image.product_version.starts_with("3.") {
// The schematool version in 3.1.x does *not* support the `-initOrUpgradeSchema` flag yet, so we can not use that.
// As we *only* support HMS 3.1.x (or newer) since SDP release 23.11, we can safely assume we are always coming
diff --git a/rust/operator-binary/src/crd/affinity.rs b/rust/operator-binary/src/crd/affinity.rs
index 40ed8052..e187d9a4 100644
--- a/rust/operator-binary/src/crd/affinity.rs
+++ b/rust/operator-binary/src/crd/affinity.rs
@@ -47,16 +47,17 @@ mod tests {
image:
productVersion: 4.2.0
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- dbType: derby
- credentialsSecret: mySecret
+ metadataDatabase:
+ derby: {}
metastore:
roleGroups:
default:
replicas: 1
"#;
- let hive: v1alpha1::HiveCluster = serde_yaml::from_str(input).expect("illegal test input");
+ let deserializer = serde_yaml::Deserializer::from_str(input);
+ let hive: v1alpha1::HiveCluster =
+ serde_yaml::with::singleton_map_recursive::deserialize(deserializer)
+ .expect("invalid test input");
let merged_config = hive
.merged_config(&role, &role.rolegroup_ref(&hive, "default"))
.unwrap();
diff --git a/rust/operator-binary/src/crd/mod.rs b/rust/operator-binary/src/crd/mod.rs
index 62ef24f2..8dbe017e 100644
--- a/rust/operator-binary/src/crd/mod.rs
+++ b/rust/operator-binary/src/crd/mod.rs
@@ -18,7 +18,15 @@ use stackable_operator::{
fragment::{self, Fragment, ValidationError},
merge::Merge,
},
- crd::s3,
+ crd::{
+ database::{
+ databases::{
+ derby::DerbyConnection, mysql::MysqlConnection, postgresql::PostgresqlConnection,
+ },
+ drivers::jdbc::JDBCDatabaseConnection,
+ },
+ s3,
+ },
deep_merger::ObjectOverrides,
k8s_openapi::apimachinery::pkg::api::resource::Quantity,
kube::{CustomResource, ResourceExt, runtime::reflector::ObjectRef},
@@ -67,12 +75,6 @@ pub const METRICS_PORT: u16 = 9084;
pub const STACKABLE_TRUST_STORE: &str = "/stackable/truststore.p12";
pub const STACKABLE_TRUST_STORE_PASSWORD: &str = "changeit";
-// DB credentials
-pub const DB_USERNAME_PLACEHOLDER: &str = "xxx_db_username_xxx";
-pub const DB_PASSWORD_PLACEHOLDER: &str = "xxx_db_password_xxx";
-pub const DB_USERNAME_ENV: &str = "DB_USERNAME_ENV";
-pub const DB_PASSWORD_ENV: &str = "DB_PASSWORD_ENV";
-
const DEFAULT_METASTORE_GRACEFUL_SHUTDOWN_TIMEOUT: Duration = Duration::from_minutes_unchecked(5);
#[derive(Snafu, Debug)]
@@ -154,7 +156,7 @@ pub mod versioned {
pub listener_class: String,
}
- #[derive(Clone, Debug, Deserialize, Eq, JsonSchema, PartialEq, Serialize)]
+ #[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct HiveClusterConfig {
/// Settings related to user [authentication](DOCS_BASE_URL_PLACEHOLDER/hive/usage-guide/security).
@@ -164,8 +166,8 @@ pub mod versioned {
/// Learn more in the [Hive authorization usage guide](DOCS_BASE_URL_PLACEHOLDER/hive/usage-guide/security#authorization).
pub authorization: Option,
- // no doc - docs in DatabaseConnectionSpec struct.
- pub database: DatabaseConnectionSpec,
+ /// TODO docs
+ pub metadata_database: MetadataDatabaseConnection,
/// HDFS connection specification.
#[serde(default, skip_serializing_if = "Option::is_none")]
@@ -295,10 +297,6 @@ impl v1alpha1::HiveCluster {
.map(|k| k.secret_class.clone())
}
- pub fn db_type(&self) -> &DbType {
- &self.spec.cluster_config.database.db_type
- }
-
pub fn get_opa_config(&self) -> Option<&OpaConfig> {
self.spec
.cluster_config
@@ -500,66 +498,42 @@ impl MetaStoreConfig {
}
}
-#[derive(
- Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize, Display, EnumString,
-)]
-pub enum DbType {
- #[serde(rename = "derby")]
- #[strum(serialize = "derby")]
- Derby,
-
- #[serde(rename = "mysql")]
- #[strum(serialize = "mysql")]
- Mysql,
-
- #[serde(rename = "postgres")]
- #[strum(serialize = "postgres")]
- Postgres,
-
- #[serde(rename = "oracle")]
- #[strum(serialize = "oracle")]
- Oracle,
-
- #[serde(rename = "mssql")]
- #[strum(serialize = "mssql")]
- Mssql,
+#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub enum MetadataDatabaseConnection {
+ /// TODO docs
+ Postgresql(PostgresqlConnection),
+
+ /// TODO docs
+ ///
+ /// Please note that - due to license issues - we don't ship the mysql driver, you need to add
+ /// it yourself.
+ /// See <DOCS_BASE_URL_PLACEHOLDER/hive/usage-guide/database-driver> for details.
+ Mysql(MysqlConnection),
+
+ /// TODO docs
+ Derby(DerbyConnection),
+ // We don't support generic (yet?), as we need to tell the metastore the `--dbtype` on startup,
+ // which is not known for generic connection.
+ // Generic(GenericJDBCDatabaseConnection),
}
-impl DbType {
- pub fn get_jdbc_driver_class(&self, product_version: &str) -> &str {
+impl MetadataDatabaseConnection {
+ pub fn as_jdbc_database_connection(&self) -> &dyn JDBCDatabaseConnection {
match self {
- DbType::Derby => {
- // The driver class changed for hive 4.2.0
- if ["3.1.3", "4.0.0", "4.0.1", "4.1.0"].contains(&product_version) {
- "org.apache.derby.jdbc.EmbeddedDriver"
- } else {
- "org.apache.derby.iapi.jdbc.AutoloadedDriver"
- }
- }
- DbType::Mysql => "com.mysql.jdbc.Driver",
- DbType::Postgres => "org.postgresql.Driver",
- DbType::Mssql => "com.microsoft.sqlserver.jdbc.SQLServerDriver",
- DbType::Oracle => "oracle.jdbc.driver.OracleDriver",
+ Self::Postgresql(p) => p,
+ Self::Mysql(m) => m,
+ Self::Derby(d) => d,
}
}
-}
-/// Database connection specification for the metadata database.
-#[derive(Clone, Debug, Deserialize, Eq, Hash, JsonSchema, PartialEq, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DatabaseConnectionSpec {
- /// A connection string for the database. For example:
- /// `jdbc:postgresql://hivehdfs-postgresql:5432/hivehdfs`
- pub conn_string: String,
-
- /// The type of database to connect to. Supported are:
- /// `postgres`, `mysql`, `oracle`, `mssql` and `derby`.
- /// This value is used to configure the jdbc driver class.
- pub db_type: DbType,
-
- /// A reference to a Secret containing the database credentials.
- /// The Secret needs to contain the keys `username` and `password`.
- pub credentials_secret: String,
+ pub fn as_hive_db_type(&self) -> &str {
+ match self {
+ MetadataDatabaseConnection::Postgresql(_) => "postgres",
+ MetadataDatabaseConnection::Mysql(_) => "mysql",
+ MetadataDatabaseConnection::Derby(_) => "derby",
+ }
+ }
}
impl Configuration for MetaStoreConfigFragment {
@@ -584,7 +558,7 @@ impl Configuration for MetaStoreConfigFragment {
fn compute_files(
&self,
- hive: &Self::Configurable,
+ _hive: &Self::Configurable,
_role_name: &str,
file: &str,
 ) -> Result<BTreeMap<String, Option<String>>, product_config_utils::Error> {
@@ -597,19 +571,6 @@ impl Configuration for MetaStoreConfigFragment {
Some(warehouse_dir.to_string()),
);
}
- result.insert(
- MetaStoreConfig::CONNECTION_URL.to_string(),
- Some(hive.spec.cluster_config.database.conn_string.clone()),
- );
- // use a placeholder that will be replaced in the start command (also for the password)
- result.insert(
- MetaStoreConfig::CONNECTION_USER_NAME.to_string(),
- Some(DB_USERNAME_PLACEHOLDER.into()),
- );
- result.insert(
- MetaStoreConfig::CONNECTION_PASSWORD.to_string(),
- Some(DB_PASSWORD_PLACEHOLDER.into()),
- );
result.insert(
MetaStoreConfig::METASTORE_METRICS_ENABLED.to_string(),
Some("true".to_string()),
diff --git a/tests/templates/kuttl/cluster-operation/10-install-hive.yaml.j2 b/tests/templates/kuttl/cluster-operation/10-install-hive.yaml.j2
index e7f4f782..25ecb9aa 100644
--- a/tests/templates/kuttl/cluster-operation/10-install-hive.yaml.j2
+++ b/tests/templates/kuttl/cluster-operation/10-install-hive.yaml.j2
@@ -13,10 +13,8 @@ spec:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
@@ -27,12 +25,3 @@ spec:
roleGroups:
default:
replicas: 1
----
-apiVersion: v1
-kind: Secret
-metadata:
- name: hive-credentials
-type: Opaque
-stringData:
- username: APP
- password: mine
diff --git a/tests/templates/kuttl/cluster-operation/20-stop-hive.yaml.j2 b/tests/templates/kuttl/cluster-operation/20-stop-hive.yaml.j2
index 9bb428d7..37b0476f 100644
--- a/tests/templates/kuttl/cluster-operation/20-stop-hive.yaml.j2
+++ b/tests/templates/kuttl/cluster-operation/20-stop-hive.yaml.j2
@@ -4,29 +4,6 @@ kind: HiveCluster
metadata:
name: test-hive
spec:
- image:
-{% if test_scenario['values']['hive-latest'].find(",") > 0 %}
- custom: "{{ test_scenario['values']['hive-latest'].split(',')[1] }}"
- productVersion: "{{ test_scenario['values']['hive-latest'].split(',')[0] }}"
-{% else %}
- productVersion: "{{ test_scenario['values']['hive-latest'] }}"
-{% endif %}
- pullPolicy: IfNotPresent
- clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
-{% if lookup('env', 'VECTOR_AGGREGATOR') %}
- vectorAggregatorConfigMapName: vector-aggregator-discovery
-{% endif %}
clusterOperation:
stopped: true
reconciliationPaused: false
- metastore:
- config:
- logging:
- enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
- roleGroups:
- default:
- replicas: 1
diff --git a/tests/templates/kuttl/cluster-operation/30-pause-hive.yaml.j2 b/tests/templates/kuttl/cluster-operation/30-pause-hive.yaml.j2
index 17001faf..ca23843d 100644
--- a/tests/templates/kuttl/cluster-operation/30-pause-hive.yaml.j2
+++ b/tests/templates/kuttl/cluster-operation/30-pause-hive.yaml.j2
@@ -4,29 +4,6 @@ kind: HiveCluster
metadata:
name: test-hive
spec:
- image:
-{% if test_scenario['values']['hive-latest'].find(",") > 0 %}
- custom: "{{ test_scenario['values']['hive-latest'].split(',')[1] }}"
- productVersion: "{{ test_scenario['values']['hive-latest'].split(',')[0] }}"
-{% else %}
- productVersion: "{{ test_scenario['values']['hive-latest'] }}"
-{% endif %}
- pullPolicy: IfNotPresent
- clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
-{% if lookup('env', 'VECTOR_AGGREGATOR') %}
- vectorAggregatorConfigMapName: vector-aggregator-discovery
-{% endif %}
clusterOperation:
stopped: false
reconciliationPaused: true
- metastore:
- config:
- logging:
- enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
- roleGroups:
- default:
- replicas: 1
diff --git a/tests/templates/kuttl/cluster-operation/40-restart-hive.yaml.j2 b/tests/templates/kuttl/cluster-operation/40-restart-hive.yaml.j2
index 91905872..ac4b348c 100644
--- a/tests/templates/kuttl/cluster-operation/40-restart-hive.yaml.j2
+++ b/tests/templates/kuttl/cluster-operation/40-restart-hive.yaml.j2
@@ -4,29 +4,6 @@ kind: HiveCluster
metadata:
name: test-hive
spec:
- image:
-{% if test_scenario['values']['hive-latest'].find(",") > 0 %}
- custom: "{{ test_scenario['values']['hive-latest'].split(',')[1] }}"
- productVersion: "{{ test_scenario['values']['hive-latest'].split(',')[0] }}"
-{% else %}
- productVersion: "{{ test_scenario['values']['hive-latest'] }}"
-{% endif %}
- pullPolicy: IfNotPresent
- clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
-{% if lookup('env', 'VECTOR_AGGREGATOR') %}
- vectorAggregatorConfigMapName: vector-aggregator-discovery
-{% endif %}
clusterOperation:
stopped: false
reconciliationPaused: false
- metastore:
- config:
- logging:
- enableVectorAgent: {{ lookup('env', 'VECTOR_AGGREGATOR') | length > 0 }}
- roleGroups:
- default:
- replicas: 1
diff --git a/tests/templates/kuttl/external-access/install-hive.yaml.j2 b/tests/templates/kuttl/external-access/install-hive.yaml.j2
index 84b8d47c..2f100403 100644
--- a/tests/templates/kuttl/external-access/install-hive.yaml.j2
+++ b/tests/templates/kuttl/external-access/install-hive.yaml.j2
@@ -13,10 +13,8 @@ spec:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
diff --git a/tests/templates/kuttl/kerberos-hdfs/60-install-hive.yaml.j2 b/tests/templates/kuttl/kerberos-hdfs/60-install-hive.yaml.j2
index 840be987..ee3faed4 100644
--- a/tests/templates/kuttl/kerberos-hdfs/60-install-hive.yaml.j2
+++ b/tests/templates/kuttl/kerberos-hdfs/60-install-hive.yaml.j2
@@ -19,10 +19,11 @@ commands:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
hdfs:
configMap: hdfs
authentication:
diff --git a/tests/templates/kuttl/kerberos-s3/60-install-hive.yaml.j2 b/tests/templates/kuttl/kerberos-s3/60-install-hive.yaml.j2
index d60c0ef9..a87f676b 100644
--- a/tests/templates/kuttl/kerberos-s3/60-install-hive.yaml.j2
+++ b/tests/templates/kuttl/kerberos-s3/60-install-hive.yaml.j2
@@ -19,10 +19,11 @@ commands:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
s3:
reference: minio
authentication:
diff --git a/tests/templates/kuttl/logging/04-install-hive.yaml.j2 b/tests/templates/kuttl/logging/04-install-hive.yaml.j2
index 571f73f6..b42b8cc9 100644
--- a/tests/templates/kuttl/logging/04-install-hive.yaml.j2
+++ b/tests/templates/kuttl/logging/04-install-hive.yaml.j2
@@ -47,10 +47,11 @@ spec:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:postgresql://hive-postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: hive-postgresql
+ database: hive
+ credentialsSecret: hive-credentials
vectorAggregatorConfigMapName: hive-vector-aggregator-discovery
metastore:
roleGroups:
diff --git a/tests/templates/kuttl/orphaned-resources/01-install-hive.yaml.j2 b/tests/templates/kuttl/orphaned-resources/01-install-hive.yaml.j2
index cd8b1df0..7464dc42 100644
--- a/tests/templates/kuttl/orphaned-resources/01-install-hive.yaml.j2
+++ b/tests/templates/kuttl/orphaned-resources/01-install-hive.yaml.j2
@@ -13,10 +13,8 @@ spec:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
@@ -29,12 +27,3 @@ spec:
replicas: 1
remove:
replicas: 1
----
-apiVersion: v1
-kind: Secret
-metadata:
- name: hive-credentials
-type: Opaque
-stringData:
- username: APP
- password: mine
diff --git a/tests/templates/kuttl/orphaned-resources/03-remove-role-group.yaml.j2 b/tests/templates/kuttl/orphaned-resources/03-remove-role-group.yaml.j2
index 19ae7250..ddc0eda0 100644
--- a/tests/templates/kuttl/orphaned-resources/03-remove-role-group.yaml.j2
+++ b/tests/templates/kuttl/orphaned-resources/03-remove-role-group.yaml.j2
@@ -4,19 +4,6 @@ kind: HiveCluster
metadata:
name: test-hive
spec:
- image:
-{% if test_scenario['values']['hive-latest'].find(",") > 0 %}
- custom: "{{ test_scenario['values']['hive-latest'].split(',')[1] }}"
- productVersion: "{{ test_scenario['values']['hive-latest'].split(',')[0] }}"
-{% else %}
- productVersion: "{{ test_scenario['values']['hive-latest'] }}"
-{% endif %}
- pullPolicy: IfNotPresent
- clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
metastore:
roleGroups:
remove: null
diff --git a/tests/templates/kuttl/orphaned-resources/04-change-rolegroup.yaml b/tests/templates/kuttl/orphaned-resources/04-change-rolegroup.yaml
index 669d5592..712baac9 100644
--- a/tests/templates/kuttl/orphaned-resources/04-change-rolegroup.yaml
+++ b/tests/templates/kuttl/orphaned-resources/04-change-rolegroup.yaml
@@ -4,11 +4,6 @@ kind: HiveCluster
metadata:
name: test-hive
spec:
- clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
metastore:
roleGroups:
default: null
diff --git a/tests/templates/kuttl/resources/10-install-hive.yaml.j2 b/tests/templates/kuttl/resources/10-install-hive.yaml.j2
index a985772d..7c847240 100644
--- a/tests/templates/kuttl/resources/10-install-hive.yaml.j2
+++ b/tests/templates/kuttl/resources/10-install-hive.yaml.j2
@@ -13,10 +13,8 @@ spec:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:derby:;databaseName=/tmp/hive;create=true
- credentialsSecret: hive-credentials
- dbType: derby
+ metadataDatabase:
+ derby: {}
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}
@@ -52,12 +50,3 @@ spec:
cpu: 500m
limits:
cpu: 3100m
----
-apiVersion: v1
-kind: Secret
-metadata:
- name: hive-credentials
-type: Opaque
-stringData:
- username: APP
- password: mine
diff --git a/tests/templates/kuttl/smoke/60-install-hive.yaml.j2 b/tests/templates/kuttl/smoke/60-install-hive.yaml.j2
index 1f35f82a..25d900a9 100644
--- a/tests/templates/kuttl/smoke/60-install-hive.yaml.j2
+++ b/tests/templates/kuttl/smoke/60-install-hive.yaml.j2
@@ -17,10 +17,11 @@ spec:
opa:
configMapName: opa
package: hms
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
s3:
reference: minio
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
diff --git a/tests/templates/kuttl/upgrade/30-install-hive.yaml.j2 b/tests/templates/kuttl/upgrade/30-install-hive.yaml.j2
index 39d7fe02..5a2b0edb 100644
--- a/tests/templates/kuttl/upgrade/30-install-hive.yaml.j2
+++ b/tests/templates/kuttl/upgrade/30-install-hive.yaml.j2
@@ -13,10 +13,11 @@ spec:
{% endif %}
pullPolicy: IfNotPresent
clusterConfig:
- database:
- connString: jdbc:postgresql://postgresql:5432/hive
- credentialsSecret: hive-credentials
- dbType: postgres
+ metadataDatabase:
+ postgresql:
+ host: postgresql
+ database: hive
+ credentialsSecret: hive-credentials
{% if lookup('env', 'VECTOR_AGGREGATOR') %}
vectorAggregatorConfigMapName: vector-aggregator-discovery
{% endif %}