From 2267b78a10e8575f69a92b7e115c1a84f7bb8a91 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 20:57:43 +0700 Subject: [PATCH 01/21] chore(deps): bump antd from 6.2.0 to 6.2.1 in /docs (#37301) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package.json | 2 +- docs/yarn.lock | 40 ++++++++++++++++++++-------------------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/docs/package.json b/docs/package.json index 8d3d6d159987..53902b70bce8 100644 --- a/docs/package.json +++ b/docs/package.json @@ -51,7 +51,7 @@ "@storybook/preview-api": "^8.6.11", "@storybook/theming": "^8.6.11", "@superset-ui/core": "^0.20.4", - "antd": "^6.2.0", + "antd": "^6.2.1", "caniuse-lite": "^1.0.30001764", "docusaurus-plugin-less": "^2.0.2", "js-yaml": "^4.1.1", diff --git a/docs/yarn.lock b/docs/yarn.lock index 9ef2d8aa940e..149cf3ac9c5a 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -212,10 +212,10 @@ "@babel/runtime" "^7.23.2" "@rc-component/util" "^1.4.0" -"@ant-design/cssinjs@^2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@ant-design/cssinjs/-/cssinjs-2.0.1.tgz#a7742deba17d613769db6d1aa4cfa46222ccec45" - integrity sha512-Lw1Z4cUQxdMmTNir67gU0HCpTl5TtkKCJPZ6UBvCqzcOTl/QmMFB6qAEoj8qFl0CuZDX9qQYa3m9+rEKfaBSbA== +"@ant-design/cssinjs@^2.0.1", "@ant-design/cssinjs@^2.0.3": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@ant-design/cssinjs/-/cssinjs-2.0.3.tgz#b6d35b7b00f76fa8a8894d157bc158429ec7b1ca" + integrity sha512-HAo8SZ3a6G8v6jT0suCz1270na6EA3obeJWM4uzRijBhdwdoMAXWK2f4WWkwB28yUufsfk3CAhN1coGPQq4kNQ== dependencies: "@babel/runtime" "^7.11.1" "@emotion/hash" "^0.8.0" @@ -2919,10 +2919,10 @@ "@rc-component/util" "^1.3.0" clsx "^2.1.1" -"@rc-component/resize-observer@^1.0.0", "@rc-component/resize-observer@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@rc-component/resize-observer/-/resize-observer-1.0.1.tgz#bd07c2ab29baa019bd83a0870c07f6902d2241a3" - integrity sha512-r+w+Mz1EiueGk1IgjB3ptNXLYSLZ5vnEfKHH+gfgj7JMupftyzvUUl3fRcMZe5uMM04x0n8+G2o/c6nlO2+Wag== +"@rc-component/resize-observer@^1.0.0", "@rc-component/resize-observer@^1.0.1", "@rc-component/resize-observer@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@rc-component/resize-observer/-/resize-observer-1.1.1.tgz#216d7edb5259bb7d2a732735f0a103328ac8ad80" + integrity sha512-NfXXMmiR+SmUuKE1NwJESzEUYUFWIDUn2uXpxCTOLwiRUUakd62DRNFjRJArgzyFW8S5rsL4aX5XlyIXyC/vRA== dependencies: "@rc-component/util" "^1.2.0" @@ -3043,14 +3043,14 @@ "@rc-component/virtual-list" "^1.0.1" clsx "^2.1.1" -"@rc-component/trigger@^3.0.0", "@rc-component/trigger@^3.6.15", "@rc-component/trigger@^3.7.1", "@rc-component/trigger@^3.8.2": - version "3.8.2" - resolved "https://registry.yarnpkg.com/@rc-component/trigger/-/trigger-3.8.2.tgz#75d3bd194381678262c674e40392f287bea11765" - integrity sha512-I6idYAk8YY3Ly6v5hB7ONqxfdTYTcVNUmV1ZjtSsGH6N/k5tss9+OAtusr+7zdlIcD7TwnlsoB5etfB14ORtMw== +"@rc-component/trigger@^3.0.0", "@rc-component/trigger@^3.6.15", "@rc-component/trigger@^3.7.1", "@rc-component/trigger@^3.9.0": + version "3.9.0" + resolved "https://registry.yarnpkg.com/@rc-component/trigger/-/trigger-3.9.0.tgz#d4d2df167e9aced1bf17672d9104a3297663f766" + integrity sha512-X8btpwfrT27AgrZVOz4swclhEHTZcqaHeQMXXBgveagOiakTa36uObXbdwerXffgV8G9dH1fAAE0DHtVQs8EHg== dependencies: "@rc-component/motion" "^1.1.4" "@rc-component/portal" "^2.2.0" - 
"@rc-component/resize-observer" "^1.0.0" + "@rc-component/resize-observer" "^1.1.1" "@rc-component/util" "^1.2.1" clsx "^2.1.1" @@ -4877,13 +4877,13 @@ ansi-styles@^6.1.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== -antd@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/antd/-/antd-6.2.0.tgz#f61f2ebad9a8e6c9905540c97e432f5efea3bbed" - integrity sha512-fwETatwHYExjfzKcV41fBtgPo4kp+g+9gp5YOSSGxwnJHljps8TbXef8WP7ZnaOn5dkcA9xIC0TyUecIybBG7w== +antd@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/antd/-/antd-6.2.1.tgz#3014b5973e843641d0723476c7e234f45254a1f1" + integrity sha512-ycw/XX7So4MdrwYKGfvZJdkGiCYUOSTebAIi+ejE95WJ138b11oy/iJg7iH0qydaD/B5sFd7Tz8XfPBuW7CRmw== dependencies: "@ant-design/colors" "^8.0.1" - "@ant-design/cssinjs" "^2.0.1" + "@ant-design/cssinjs" "^2.0.3" "@ant-design/cssinjs-utils" "^2.0.2" "@ant-design/fast-color" "^3.0.0" "@ant-design/icons" "^6.1.0" @@ -4910,7 +4910,7 @@ antd@^6.2.0: "@rc-component/progress" "~1.0.2" "@rc-component/qrcode" "~1.1.1" "@rc-component/rate" "~1.0.1" - "@rc-component/resize-observer" "^1.0.1" + "@rc-component/resize-observer" "^1.1.1" "@rc-component/segmented" "~1.3.0" "@rc-component/select" "~1.5.0" "@rc-component/slider" "~1.0.1" @@ -4923,7 +4923,7 @@ antd@^6.2.0: "@rc-component/tour" "~2.3.0" "@rc-component/tree" "~1.1.0" "@rc-component/tree-select" "~1.6.0" - "@rc-component/trigger" "^3.8.2" + "@rc-component/trigger" "^3.9.0" "@rc-component/upload" "~1.1.0" "@rc-component/util" "^1.7.0" clsx "^2.1.1" From 445bc403b8c33086475be7f3ce73e31ad4cc0fad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 21:00:59 +0700 Subject: [PATCH 02/21] chore(deps): bump diff in /superset-frontend (#37292) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/package-lock.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 375112743f15..0ad9edab9d75 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -28187,9 +28187,9 @@ "license": "BSD-3-Clause" }, "node_modules/diff": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", - "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.2.tgz", + "integrity": "sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A==", "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" @@ -56795,9 +56795,9 @@ } }, "node_modules/ts-node/node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.4.tgz", + "integrity": "sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==", "license": "BSD-3-Clause", "engines": { "node": ">=0.3.1" @@ -66037,7 +66037,7 @@ "dependencies": { "@deck.gl/aggregation-layers": "~9.2.5", 
"@deck.gl/core": "~9.2.5", - "@deck.gl/extensions": "~9.2.2", + "@deck.gl/extensions": "~9.2.5", "@deck.gl/geo-layers": "~9.2.5", "@deck.gl/layers": "~9.2.5", "@deck.gl/mesh-layers": "~9.2.2", From 807ff513eff531b0ab8358e5ab0cc90261622b99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 21:01:46 +0700 Subject: [PATCH 03/21] chore(deps): bump fs-extra from 11.3.2 to 11.3.3 in /superset-frontend (#37274) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> From 281c0c9672297624b2ad9a8166b970440578a6b7 Mon Sep 17 00:00:00 2001 From: Ville Brofeldt <33317356+villebro@users.noreply.github.com> Date: Wed, 21 Jan 2026 06:19:28 -0800 Subject: [PATCH 04/21] chore: add paths to backend extension stack traces (#37300) --- superset/extensions/discovery.py | 22 +++++++++++- superset/extensions/types.py | 3 ++ superset/extensions/utils.py | 55 +++++++++++++++++++++++++---- superset/initialization/__init__.py | 5 ++- 4 files changed, 76 insertions(+), 9 deletions(-) diff --git a/superset/extensions/discovery.py b/superset/extensions/discovery.py index 5727a9d14b9d..41ad69c18b35 100644 --- a/superset/extensions/discovery.py +++ b/superset/extensions/discovery.py @@ -23,6 +23,7 @@ from superset.extensions.types import LoadedExtension from superset.extensions.utils import get_bundle_files_from_zip, get_loaded_extension +from superset.utils import json logger = logging.getLogger(__name__) @@ -59,8 +60,27 @@ def discover_and_load_extensions( try: with ZipFile(supx_file, "r") as zip_file: + # Read the manifest first to get the extension ID for the + # supx:// path + try: + manifest_content = zip_file.read("manifest.json") + manifest_data = json.loads(manifest_content) + extension_id = manifest_data["id"] + except (KeyError, json.JSONDecodeError) as e: + logger.error( + "Failed to read extension ID from manifest in %s: %s", + supx_file, + e, + ) + continue + + # Use supx:// scheme for tracebacks + source_base_path = f"supx://{extension_id}" + files = get_bundle_files_from_zip(zip_file) - extension = get_loaded_extension(files) + extension = get_loaded_extension( + files, source_base_path=source_base_path + ) logger.info( "Loaded extension '%s' from %s", extension.id, supx_file ) diff --git a/superset/extensions/types.py b/superset/extensions/types.py index 07d7a317b6e9..3f137abcddff 100644 --- a/superset/extensions/types.py +++ b/superset/extensions/types.py @@ -34,3 +34,6 @@ class LoadedExtension: frontend: dict[str, bytes] backend: dict[str, bytes] version: str + source_base_path: ( + str # Base path for traceback filenames (absolute path or supx:// URL) + ) diff --git a/superset/extensions/utils.py b/superset/extensions/utils.py index d885676d4ddb..883c91147280 100644 --- a/superset/extensions/utils.py +++ b/superset/extensions/utils.py @@ -55,15 +55,30 @@ def exec_module(self, module: Any) -> None: ) if self.is_package: module.__path__ = [] - exec(self.source, module.__dict__) # noqa: S102 + # Compile with filename for proper tracebacks + code = compile(self.source, self.origin, "exec") + exec(code, module.__dict__) # noqa: S102 class InMemoryFinder(importlib.abc.MetaPathFinder): - def __init__(self, file_dict: dict[str, bytes]) -> None: + def __init__(self, file_dict: dict[str, bytes], source_base_path: str) -> None: self.modules: dict[str, Tuple[Any, Any, Any]] = {} + + # Detect if this is a virtual path (supx://) or filesystem path + is_virtual_path = 
source_base_path.startswith("supx://") + for path, content in file_dict.items(): mod_name, is_package = self._get_module_name(path) - self.modules[mod_name] = (content, is_package, path) + + # Reconstruct full path for tracebacks + if is_virtual_path: + # Virtual paths always use forward slashes + # e.g., supx://extension-id/backend/src/tasks.py + full_path = f"{source_base_path}/backend/src/{path}" + else: + full_path = str(Path(source_base_path) / "backend" / "src" / path) + + self.modules[mod_name] = (content, is_package, full_path) def _get_module_name(self, file_path: str) -> Tuple[str, bool]: parts = list(Path(file_path).parts) @@ -88,8 +103,19 @@ def find_spec(self, fullname: str, path: Any, target: Any = None) -> Any | None: return None -def install_in_memory_importer(file_dict: dict[str, bytes]) -> None: - finder = InMemoryFinder(file_dict) +def install_in_memory_importer( + file_dict: dict[str, bytes], source_base_path: str +) -> None: + """ + Install an in-memory module importer for extension backend code. + + :param file_dict: Dictionary mapping relative file paths to their content + :param source_base_path: Base path for traceback filenames. For LOCAL_EXTENSIONS, + this should be an absolute filesystem path to the dist directory. + For EXTENSIONS_PATH (.supx files), this should be a supx:// URL + (e.g., "supx://extension-id"). + """ + finder = InMemoryFinder(file_dict, source_base_path) sys.meta_path.insert(0, finder) @@ -121,7 +147,19 @@ def get_bundle_files_from_path(base_path: str) -> Generator[BundleFile, None, No yield BundleFile(name=rel_path, content=content) -def get_loaded_extension(files: Iterable[BundleFile]) -> LoadedExtension: +def get_loaded_extension( + files: Iterable[BundleFile], source_base_path: str +) -> LoadedExtension: + """ + Load an extension from bundle files. + + :param files: Iterable of BundleFile objects containing the extension files + :param source_base_path: Base path for traceback filenames. For LOCAL_EXTENSIONS, + this should be an absolute filesystem path to the dist directory. + For EXTENSIONS_PATH (.supx files), this should be a supx:// URL + (e.g., "supx://extension-id"). 
+ :returns: LoadedExtension instance + """ manifest: Manifest | None = None frontend: dict[str, bytes] = {} backend: dict[str, bytes] = {} @@ -158,6 +196,7 @@ def get_loaded_extension(files: Iterable[BundleFile]) -> LoadedExtension: frontend=frontend, backend=backend, version=manifest.version, + source_base_path=source_base_path, ) @@ -190,7 +229,9 @@ def get_extensions() -> dict[str, LoadedExtension]: # Load extensions from LOCAL_EXTENSIONS configuration (filesystem paths) for path in current_app.config["LOCAL_EXTENSIONS"]: files = get_bundle_files_from_path(path) - extension = get_loaded_extension(files) + # Use absolute filesystem path to dist directory for tracebacks + abs_dist_path = str((Path(path) / "dist").resolve()) + extension = get_loaded_extension(files, source_base_path=abs_dist_path) extension_id = extension.manifest.id extensions[extension_id] = extension logger.info( diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 1f18f7da0cdc..adc783c1e128 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -562,7 +562,10 @@ def init_extensions(self) -> None: for extension in extensions.values(): if backend_files := extension.backend: - install_in_memory_importer(backend_files) + install_in_memory_importer( + backend_files, + source_base_path=extension.source_base_path, + ) backend = extension.manifest.backend From 238bebebeced9bd238d8281f7c966ab132e293f7 Mon Sep 17 00:00:00 2001 From: "Michael S. Molina" <70410625+michael-s-molina@users.noreply.github.com> Date: Wed, 21 Jan 2026 11:22:14 -0300 Subject: [PATCH 05/21] fix(extensions): prevent duplicate initialization of LOCAL_EXTENSIONS watcher (#37250) --- superset/extensions/local_extensions_watcher.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/superset/extensions/local_extensions_watcher.py b/superset/extensions/local_extensions_watcher.py index 6d79a3298afb..4a5c9a0501ba 100644 --- a/superset/extensions/local_extensions_watcher.py +++ b/superset/extensions/local_extensions_watcher.py @@ -32,6 +32,10 @@ logger = logging.getLogger(__name__) +# Guard to prevent multiple initializations +_watcher_initialized = False +_watcher_lock = threading.Lock() + def _get_file_handler_class() -> Any: """Get the file handler class, importing watchdog only when needed.""" @@ -68,6 +72,14 @@ def on_any_event(self, event: Any) -> None: def setup_local_extensions_watcher(app: Flask) -> None: # noqa: C901 """Set up file watcher for LOCAL_EXTENSIONS directories.""" + global _watcher_initialized + + # Prevent multiple initializations + with _watcher_lock: + if _watcher_initialized: + return + _watcher_initialized = True + # Only set up watcher in debug mode or when Flask reloader is enabled if not (app.debug or app.config.get("FLASK_USE_RELOAD", False)): return From 801c84f0ef7038279f90b32856ada7dbf779caa6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 22:05:43 +0700 Subject: [PATCH 06/21] chore(deps-dev): bump typescript-eslint from 8.53.0 to 8.53.1 in /superset-websocket (#37268) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-websocket/package-lock.json | 244 +++++++++++++-------------- superset-websocket/package.json | 2 +- 2 files changed, 123 insertions(+), 123 deletions(-) diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index 114e39f4311e..a6c6516b8bb1 
100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -40,7 +40,7 @@ "ts-node": "^10.9.2", "tscw-config": "^1.1.2", "typescript": "^5.9.3", - "typescript-eslint": "^8.53.0" + "typescript-eslint": "^8.53.1" }, "engines": { "node": "^20.19.4", @@ -1875,17 +1875,17 @@ "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.0.tgz", - "integrity": "sha512-eEXsVvLPu8Z4PkFibtuFJLJOTAV/nPdgtSjkGoPpddpFk3/ym2oy97jynY6ic2m6+nc5M8SE1e9v/mHKsulcJg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz", + "integrity": "sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.53.0", - "@typescript-eslint/type-utils": "8.53.0", - "@typescript-eslint/utils": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0", + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/type-utils": "8.53.1", + "@typescript-eslint/utils": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -1898,7 +1898,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.53.0", + "@typescript-eslint/parser": "^8.53.1", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -1914,16 +1914,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.0.tgz", - "integrity": "sha512-npiaib8XzbjtzS2N4HlqPvlpxpmZ14FjSJrteZpPxGUaYPlvhzlzUZ4mZyABo0EFrOWnvyd0Xxroq//hKhtAWg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.1.tgz", + "integrity": "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.53.0", - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0", + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3" }, "engines": { @@ -1939,14 +1939,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.0.tgz", - "integrity": "sha512-Bl6Gdr7NqkqIP5yP9z1JU///Nmes4Eose6L1HwpuVHwScgDPPuEWbUVhvlZmb8hy0vX9syLk5EGNL700WcBlbg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.1.tgz", + "integrity": "sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.53.0", - "@typescript-eslint/types": "^8.53.0", + "@typescript-eslint/tsconfig-utils": "^8.53.1", + "@typescript-eslint/types": "^8.53.1", "debug": "^4.4.3" }, "engines": { @@ -1961,14 +1961,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.53.0", - 
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.0.tgz", - "integrity": "sha512-kWNj3l01eOGSdVBnfAF2K1BTh06WS0Yet6JUgb9Cmkqaz3Jlu0fdVUjj9UI8gPidBWSMqDIglmEXifSgDT/D0g==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz", + "integrity": "sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0" + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -1979,9 +1979,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.0.tgz", - "integrity": "sha512-K6Sc0R5GIG6dNoPdOooQ+KtvT5KCKAvTcY8h2rIuul19vxH5OTQk7ArKkd4yTzkw66WnNY0kPPzzcmWA+XRmiA==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz", + "integrity": "sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==", "dev": true, "license": "MIT", "engines": { @@ -1996,15 +1996,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.0.tgz", - "integrity": "sha512-BBAUhlx7g4SmcLhn8cnbxoxtmS7hcq39xKCgiutL3oNx1TaIp+cny51s8ewnKMpVUKQUGb41RAUWZ9kxYdovuw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz", + "integrity": "sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0", - "@typescript-eslint/utils": "8.53.0", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/utils": "8.53.1", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -2021,9 +2021,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.0.tgz", - "integrity": "sha512-Bmh9KX31Vlxa13+PqPvt4RzKRN1XORYSLlAE+sO1i28NkisGbTtSLFVB3l7PWdHtR3E0mVMuC7JilWJ99m2HxQ==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.1.tgz", + "integrity": "sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==", "dev": true, "license": "MIT", "engines": { @@ -2035,16 +2035,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.0.tgz", - "integrity": "sha512-pw0c0Gdo7Z4xOG987u3nJ8akL9093yEEKv8QTJ+Bhkghj1xyj8cgPaavlr9rq8h7+s6plUJ4QJYw2gCZodqmGw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz", + "integrity": "sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.53.0", - "@typescript-eslint/tsconfig-utils": "8.53.0", - 
"@typescript-eslint/types": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0", + "@typescript-eslint/project-service": "8.53.1", + "@typescript-eslint/tsconfig-utils": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3", "minimatch": "^9.0.5", "semver": "^7.7.3", @@ -2089,16 +2089,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.0.tgz", - "integrity": "sha512-XDY4mXTez3Z1iRDI5mbRhH4DFSt46oaIFsLg+Zn97+sYrXACziXSQcSelMybnVZ5pa1P6xYkPr5cMJyunM1ZDA==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.1.tgz", + "integrity": "sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.53.0", - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0" + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2113,13 +2113,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.0.tgz", - "integrity": "sha512-LZ2NqIHFhvFwxG0qZeLL9DvdNAHPGCY5dIRwBhyYeU+LfLhcStE1ImjsuTG/WaVh3XysGaeLW8Rqq7cGkPCFvw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz", + "integrity": "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.53.0", + "@typescript-eslint/types": "8.53.1", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -6218,16 +6218,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.53.0.tgz", - "integrity": "sha512-xHURCQNxZ1dsWn0sdOaOfCSQG0HKeqSj9OexIxrz6ypU6wHYOdX2I3D2b8s8wFSsSOYJb+6q283cLiLlkEsBYw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.53.1.tgz", + "integrity": "sha512-gB+EVQfP5RDElh9ittfXlhZJdjSU4jUSTyE2+ia8CYyNvet4ElfaLlAIqDvQV9JPknKx0jQH1racTYe/4LaLSg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.53.0", - "@typescript-eslint/parser": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0", - "@typescript-eslint/utils": "8.53.0" + "@typescript-eslint/eslint-plugin": "8.53.1", + "@typescript-eslint/parser": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/utils": "8.53.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -7993,16 +7993,16 @@ "dev": true }, "@typescript-eslint/eslint-plugin": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.0.tgz", - "integrity": "sha512-eEXsVvLPu8Z4PkFibtuFJLJOTAV/nPdgtSjkGoPpddpFk3/ym2oy97jynY6ic2m6+nc5M8SE1e9v/mHKsulcJg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz", + "integrity": 
"sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag==", "dev": true, "requires": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.53.0", - "@typescript-eslint/type-utils": "8.53.0", - "@typescript-eslint/utils": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0", + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/type-utils": "8.53.1", + "@typescript-eslint/utils": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -8017,75 +8017,75 @@ } }, "@typescript-eslint/parser": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.0.tgz", - "integrity": "sha512-npiaib8XzbjtzS2N4HlqPvlpxpmZ14FjSJrteZpPxGUaYPlvhzlzUZ4mZyABo0EFrOWnvyd0Xxroq//hKhtAWg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.53.1.tgz", + "integrity": "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg==", "dev": true, "requires": { - "@typescript-eslint/scope-manager": "8.53.0", - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0", + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3" } }, "@typescript-eslint/project-service": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.0.tgz", - "integrity": "sha512-Bl6Gdr7NqkqIP5yP9z1JU///Nmes4Eose6L1HwpuVHwScgDPPuEWbUVhvlZmb8hy0vX9syLk5EGNL700WcBlbg==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.53.1.tgz", + "integrity": "sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog==", "dev": true, "requires": { - "@typescript-eslint/tsconfig-utils": "^8.53.0", - "@typescript-eslint/types": "^8.53.0", + "@typescript-eslint/tsconfig-utils": "^8.53.1", + "@typescript-eslint/types": "^8.53.1", "debug": "^4.4.3" } }, "@typescript-eslint/scope-manager": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.0.tgz", - "integrity": "sha512-kWNj3l01eOGSdVBnfAF2K1BTh06WS0Yet6JUgb9Cmkqaz3Jlu0fdVUjj9UI8gPidBWSMqDIglmEXifSgDT/D0g==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz", + "integrity": "sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ==", "dev": true, "requires": { - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0" + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1" } }, "@typescript-eslint/tsconfig-utils": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.0.tgz", - "integrity": "sha512-K6Sc0R5GIG6dNoPdOooQ+KtvT5KCKAvTcY8h2rIuul19vxH5OTQk7ArKkd4yTzkw66WnNY0kPPzzcmWA+XRmiA==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz", + "integrity": "sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA==", 
"dev": true, "requires": {} }, "@typescript-eslint/type-utils": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.0.tgz", - "integrity": "sha512-BBAUhlx7g4SmcLhn8cnbxoxtmS7hcq39xKCgiutL3oNx1TaIp+cny51s8ewnKMpVUKQUGb41RAUWZ9kxYdovuw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz", + "integrity": "sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w==", "dev": true, "requires": { - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0", - "@typescript-eslint/utils": "8.53.0", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/utils": "8.53.1", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" } }, "@typescript-eslint/types": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.0.tgz", - "integrity": "sha512-Bmh9KX31Vlxa13+PqPvt4RzKRN1XORYSLlAE+sO1i28NkisGbTtSLFVB3l7PWdHtR3E0mVMuC7JilWJ99m2HxQ==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.53.1.tgz", + "integrity": "sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A==", "dev": true }, "@typescript-eslint/typescript-estree": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.0.tgz", - "integrity": "sha512-pw0c0Gdo7Z4xOG987u3nJ8akL9093yEEKv8QTJ+Bhkghj1xyj8cgPaavlr9rq8h7+s6plUJ4QJYw2gCZodqmGw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz", + "integrity": "sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg==", "dev": true, "requires": { - "@typescript-eslint/project-service": "8.53.0", - "@typescript-eslint/tsconfig-utils": "8.53.0", - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/visitor-keys": "8.53.0", + "@typescript-eslint/project-service": "8.53.1", + "@typescript-eslint/tsconfig-utils": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3", "minimatch": "^9.0.5", "semver": "^7.7.3", @@ -8114,24 +8114,24 @@ } }, "@typescript-eslint/utils": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.0.tgz", - "integrity": "sha512-XDY4mXTez3Z1iRDI5mbRhH4DFSt46oaIFsLg+Zn97+sYrXACziXSQcSelMybnVZ5pa1P6xYkPr5cMJyunM1ZDA==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.53.1.tgz", + "integrity": "sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.53.0", - "@typescript-eslint/types": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0" + "@typescript-eslint/scope-manager": "8.53.1", + "@typescript-eslint/types": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1" } }, "@typescript-eslint/visitor-keys": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.0.tgz", - "integrity": "sha512-LZ2NqIHFhvFwxG0qZeLL9DvdNAHPGCY5dIRwBhyYeU+LfLhcStE1ImjsuTG/WaVh3XysGaeLW8Rqq7cGkPCFvw==", + "version": "8.53.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz", + "integrity": "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg==", "dev": true, "requires": { - "@typescript-eslint/types": "8.53.0", + "@typescript-eslint/types": "8.53.1", "eslint-visitor-keys": "^4.2.1" }, "dependencies": { @@ -11104,15 +11104,15 @@ "dev": true }, "typescript-eslint": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.53.0.tgz", - "integrity": "sha512-xHURCQNxZ1dsWn0sdOaOfCSQG0HKeqSj9OexIxrz6ypU6wHYOdX2I3D2b8s8wFSsSOYJb+6q283cLiLlkEsBYw==", + "version": "8.53.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.53.1.tgz", + "integrity": "sha512-gB+EVQfP5RDElh9ittfXlhZJdjSU4jUSTyE2+ia8CYyNvet4ElfaLlAIqDvQV9JPknKx0jQH1racTYe/4LaLSg==", "dev": true, "requires": { - "@typescript-eslint/eslint-plugin": "8.53.0", - "@typescript-eslint/parser": "8.53.0", - "@typescript-eslint/typescript-estree": "8.53.0", - "@typescript-eslint/utils": "8.53.0" + "@typescript-eslint/eslint-plugin": "8.53.1", + "@typescript-eslint/parser": "8.53.1", + "@typescript-eslint/typescript-estree": "8.53.1", + "@typescript-eslint/utils": "8.53.1" } }, "uglify-js": { diff --git a/superset-websocket/package.json b/superset-websocket/package.json index 176f208f82b7..afb6b422ed0d 100644 --- a/superset-websocket/package.json +++ b/superset-websocket/package.json @@ -48,7 +48,7 @@ "ts-node": "^10.9.2", "tscw-config": "^1.1.2", "typescript": "^5.9.3", - "typescript-eslint": "^8.53.0" + "typescript-eslint": "^8.53.1" }, "engines": { "node": "^20.19.4", From 3fa7dba0946213a13b58b5edd457a2eb2d7d5bcb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 22:09:15 +0700 Subject: [PATCH 07/21] chore(deps): bump diff from 5.2.0 to 5.2.2 in /docs (#37291) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/yarn.lock b/docs/yarn.lock index 149cf3ac9c5a..234ae602a552 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -6590,9 +6590,9 @@ devlop@^1.0.0, devlop@^1.1.0: dequal "^2.0.0" diff@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.0.tgz#26ded047cd1179b78b9537d5ef725503ce1ae531" - integrity sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A== + version "5.2.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.2.tgz#0a4742797281d09cfa699b79ea32d27723623bad" + integrity sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A== dir-glob@^3.0.1: version "3.0.1" From a1d24f1e4a379e5c8ea95d9690dd638f2c8f8cc9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 23:10:06 +0700 Subject: [PATCH 08/21] chore(deps-dev): bump oxlint from 1.33.0 to 1.41.0 in /superset-frontend (#37279) Signed-off-by: dependabot[bot] Signed-off-by: hainenber Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: hainenber --- superset-frontend/oxlint.json | 2 +- superset-frontend/package-lock.json | 75 +++++++++---------- superset-frontend/package.json | 2 +- .../src/components/DatePicker/index.tsx | 5 +- .../src/explore/components/SaveModal.tsx | 2 +- 5 files changed, 41 
insertions(+), 45 deletions(-) diff --git a/superset-frontend/oxlint.json b/superset-frontend/oxlint.json index 8652a37d50ce..78e680df2145 100644 --- a/superset-frontend/oxlint.json +++ b/superset-frontend/oxlint.json @@ -14,7 +14,7 @@ }, "settings": { "react": { - "version": "detect" + "version": "17.0.2" } }, "rules": { diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 0ad9edab9d75..aa8b16e94ca0 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -254,7 +254,7 @@ "lightningcss": "^1.30.2", "mini-css-extract-plugin": "^2.9.4", "open-cli": "^8.0.0", - "oxlint": "^1.32.0", + "oxlint": "^1.41.0", "po2json": "^0.4.5", "prettier": "3.8.0", "prettier-plugin-packagejson": "^2.5.20", @@ -10435,9 +10435,9 @@ } }, "node_modules/@oxlint/darwin-arm64": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/darwin-arm64/-/darwin-arm64-1.33.0.tgz", - "integrity": "sha512-PmEQDLHAxiAdyttQ1ZWXd+5VpHLbHf3FTMJL9bg5TZamDnhNiW/v0Pamv3MTAdymnoDI3H8IVLAN/SAseV/adw==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/darwin-arm64/-/darwin-arm64-1.41.0.tgz", + "integrity": "sha512-K0Bs0cNW11oWdSrKmrollKF44HMM2HKr4QidZQHMlhJcSX8pozxv0V5FLdqB4sddzCY0J9Wuuw+oRAfR8sdRwA==", "cpu": [ "arm64" ], @@ -10449,9 +10449,9 @@ ] }, "node_modules/@oxlint/darwin-x64": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/darwin-x64/-/darwin-x64-1.33.0.tgz", - "integrity": "sha512-2R9aH3kR0X2M30z5agGikv3tfNTi8/uLhU5/tYktu33VGUXpbf0OLZSlD25UEuwOKAlf3RVtzV5oDyjoq93JuQ==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/darwin-x64/-/darwin-x64-1.41.0.tgz", + "integrity": "sha512-1LCCXCe9nN8LbrJ1QOGari2HqnxrZrveYKysWDIg8gFsQglIg00XF/8lRbA0kWHMdLgt4X0wfNYhhFz+c3XXLQ==", "cpu": [ "x64" ], @@ -10463,9 +10463,9 @@ ] }, "node_modules/@oxlint/linux-arm64-gnu": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/linux-arm64-gnu/-/linux-arm64-gnu-1.33.0.tgz", - "integrity": "sha512-yb/k8GaMDgnX2LyO6km33kKItZ/n573SlbiHBBFU2HmeU7tzEHL5jHkHQXXcysUkapmqHd7UsDhOZDqPmXaQRg==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/linux-arm64-gnu/-/linux-arm64-gnu-1.41.0.tgz", + "integrity": "sha512-Fow7H84Bs8XxuaK1yfSEWBC8HI7rfEQB9eR2A0J61un1WgCas7jNrt1HbT6+p6KmUH2bhR+r/RDu/6JFAvvj4g==", "cpu": [ "arm64" ], @@ -10477,9 +10477,9 @@ ] }, "node_modules/@oxlint/linux-arm64-musl": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/linux-arm64-musl/-/linux-arm64-musl-1.33.0.tgz", - "integrity": "sha512-03pt9IO1C4ZfVOW6SQiOK26mzklAhLM3Kc79OXpX1kgZRlxk+rvFoMhlgCOzn7tEdrEgbePkBoxNnwDnJDFqJQ==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/linux-arm64-musl/-/linux-arm64-musl-1.41.0.tgz", + "integrity": "sha512-WoRRDNwgP5W3rjRh42Zdx8ferYnqpKoYCv2QQLenmdrLjRGYwAd52uywfkcS45mKEWHeY1RPwPkYCSROXiGb2w==", "cpu": [ "arm64" ], @@ -10491,9 +10491,9 @@ ] }, "node_modules/@oxlint/linux-x64-gnu": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/linux-x64-gnu/-/linux-x64-gnu-1.33.0.tgz", - "integrity": "sha512-Z7ImLWM50FoVXzYvyxUQ+QwBkBfRyK4YdLEGonyAGMp7iT3DksonDaTK9ODnJ1qHyAyAZCvuqXD7AEDsDvzDbA==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/linux-x64-gnu/-/linux-x64-gnu-1.41.0.tgz", + "integrity": "sha512-75k3CKj3fOc/a/2aSgO81s3HsTZOFROthPJ+UI2Oatic1LhvH6eKjKfx3jDDyVpzeDS2qekPlc/y3N33iZz5Og==", "cpu": [ "x64" ], @@ -10505,9 +10505,9 @@ ] }, 
"node_modules/@oxlint/linux-x64-musl": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/linux-x64-musl/-/linux-x64-musl-1.33.0.tgz", - "integrity": "sha512-idb55Uzu5kkqqpMiVUfI9nP7zOqPZinQKsIRQAIU40wILcf/ijvhNZKIu3ucDMmye0n6IWOaSnxIRL5W2fNoUQ==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/linux-x64-musl/-/linux-x64-musl-1.41.0.tgz", + "integrity": "sha512-8r82eBwGPoAPn67ZvdxTlX/Z3gVb+ZtN6nbkyFzwwHWAh8yGutX+VBcVkyrePSl6XgBP4QAaddPnHmkvJjqY0g==", "cpu": [ "x64" ], @@ -10519,9 +10519,9 @@ ] }, "node_modules/@oxlint/win32-arm64": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/win32-arm64/-/win32-arm64-1.33.0.tgz", - "integrity": "sha512-wKKFt7cubfrLelNzdmDsNSmtBrlSUe1fWus587+uSxDZdpFbQ7liU0gsUlCbcHvym0H1Tc2O3K3cnLrgQORLPQ==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/win32-arm64/-/win32-arm64-1.41.0.tgz", + "integrity": "sha512-aK+DAcckQsNCOXKruatyYuY/ROjNiRejQB1PeJtkZwM21+8rV9ODYbvKNvt0pW+YCws7svftBSFMCpl3ke2unw==", "cpu": [ "arm64" ], @@ -10533,9 +10533,9 @@ ] }, "node_modules/@oxlint/win32-x64": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/@oxlint/win32-x64/-/win32-x64-1.33.0.tgz", - "integrity": "sha512-ReyR8rNHjKNnO7dxGny9RCPELRAdhm3y780FNBcA07E1wvxSCkB+Mn5db0Pa5bRmxrsU/MTZ/aaBFa+ERXDdXw==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/@oxlint/win32-x64/-/win32-x64-1.41.0.tgz", + "integrity": "sha512-dVBXkZ6MGLd3owV7jvuqJsZwiF3qw7kEkDVsYVpS/O96eEvlHcxVbaPjJjrTBgikXqyC22vg3dxBU7MW0utGfw==", "cpu": [ "x64" ], @@ -46375,13 +46375,12 @@ } }, "node_modules/oxlint": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/oxlint/-/oxlint-1.33.0.tgz", - "integrity": "sha512-4WCL0K8jiOshwJ8WrVk35VAuVaZHC0iX6asjKsrENOrynkAAGcTLLx0Urf0eXZ1Tq7r+qAe3Z9EyHMFPzVyUkg==", + "version": "1.41.0", + "resolved": "https://registry.npmjs.org/oxlint/-/oxlint-1.41.0.tgz", + "integrity": "sha512-Dyaoup82uhgAgp5xLNt4dPdvl5eSJTIzqzL7DcKbkooUE4PDViWURIPlSUF8hu5a+sCnNIp/LlQMDsKoyaLTBA==", "dev": true, "license": "MIT", "bin": { - "oxc_language_server": "bin/oxc_language_server", "oxlint": "bin/oxlint" }, "engines": { @@ -46391,17 +46390,17 @@ "url": "https://github.com/sponsors/Boshen" }, "optionalDependencies": { - "@oxlint/darwin-arm64": "1.33.0", - "@oxlint/darwin-x64": "1.33.0", - "@oxlint/linux-arm64-gnu": "1.33.0", - "@oxlint/linux-arm64-musl": "1.33.0", - "@oxlint/linux-x64-gnu": "1.33.0", - "@oxlint/linux-x64-musl": "1.33.0", - "@oxlint/win32-arm64": "1.33.0", - "@oxlint/win32-x64": "1.33.0" + "@oxlint/darwin-arm64": "1.41.0", + "@oxlint/darwin-x64": "1.41.0", + "@oxlint/linux-arm64-gnu": "1.41.0", + "@oxlint/linux-arm64-musl": "1.41.0", + "@oxlint/linux-x64-gnu": "1.41.0", + "@oxlint/linux-x64-musl": "1.41.0", + "@oxlint/win32-arm64": "1.41.0", + "@oxlint/win32-x64": "1.41.0" }, "peerDependencies": { - "oxlint-tsgolint": ">=0.9.0" + "oxlint-tsgolint": ">=0.11.1" }, "peerDependenciesMeta": { "oxlint-tsgolint": { diff --git a/superset-frontend/package.json b/superset-frontend/package.json index dc3c3a5436e9..d689f959db48 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -335,7 +335,7 @@ "lightningcss": "^1.30.2", "mini-css-extract-plugin": "^2.9.4", "open-cli": "^8.0.0", - "oxlint": "^1.32.0", + "oxlint": "^1.41.0", "po2json": "^0.4.5", "prettier": "3.8.0", "prettier-plugin-packagejson": "^2.5.20", diff --git a/superset-frontend/packages/superset-ui-core/src/components/DatePicker/index.tsx 
b/superset-frontend/packages/superset-ui-core/src/components/DatePicker/index.tsx index e26c61404377..0657805d7591 100644 --- a/superset-frontend/packages/superset-ui-core/src/components/DatePicker/index.tsx +++ b/superset-frontend/packages/superset-ui-core/src/components/DatePicker/index.tsx @@ -29,9 +29,6 @@ export const DatePicker = (props: DatePickerProps) => ( /> ); -// Disable ESLint rule to allow tsc to infer proper type for RangePicker. -// eslint-disable-next-line prefer-destructuring -export const RangePicker: typeof AntdDatePicker.RangePicker = - AntdDatePicker.RangePicker; +export const { RangePicker } = AntdDatePicker; export type { DatePickerProps, RangePickerProps }; diff --git a/superset-frontend/src/explore/components/SaveModal.tsx b/superset-frontend/src/explore/components/SaveModal.tsx index 67284fcd97b8..3b70e70a3adf 100644 --- a/superset-frontend/src/explore/components/SaveModal.tsx +++ b/superset-frontend/src/explore/components/SaveModal.tsx @@ -320,7 +320,7 @@ class SaveModal extends Component { // Go to new dashboard url if (gotodash && dashboard) { - let url = dashboard.url; + let { url } = dashboard; if (this.state.selectedTab?.value) { url += `#${this.state.selectedTab.value}`; } From 13013bbd64dbb5aca1731fab84c6262f654cf71e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jan 2026 23:10:41 +0700 Subject: [PATCH 09/21] chore(deps-dev): bump typescript-eslint from 8.53.0 to 8.53.1 in /docs (#37272) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/package.json | 2 +- docs/yarn.lock | 142 +++++++++++++++++++++++----------------------- 2 files changed, 72 insertions(+), 72 deletions(-) diff --git a/docs/package.json b/docs/package.json index 53902b70bce8..bcc9cfea73b4 100644 --- a/docs/package.json +++ b/docs/package.json @@ -87,7 +87,7 @@ "globals": "^17.0.0", "prettier": "^3.8.0", "typescript": "~5.9.3", - "typescript-eslint": "^8.53.0", + "typescript-eslint": "^8.53.1", "webpack": "^5.104.1" }, "browserslist": { diff --git a/docs/yarn.lock b/docs/yarn.lock index 234ae602a552..c2d7e9275682 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -4447,100 +4447,100 @@ dependencies: "@types/yargs-parser" "*" -"@typescript-eslint/eslint-plugin@8.53.0", "@typescript-eslint/eslint-plugin@^8.52.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.0.tgz#afb966c66a2fdc6158cf81118204a971a36d0fc5" - integrity sha512-eEXsVvLPu8Z4PkFibtuFJLJOTAV/nPdgtSjkGoPpddpFk3/ym2oy97jynY6ic2m6+nc5M8SE1e9v/mHKsulcJg== +"@typescript-eslint/eslint-plugin@8.53.1", "@typescript-eslint/eslint-plugin@^8.52.0": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.53.1.tgz#f6640f6f8749b71d9ab457263939e8932a3c6b46" + integrity sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag== dependencies: "@eslint-community/regexpp" "^4.12.2" - "@typescript-eslint/scope-manager" "8.53.0" - "@typescript-eslint/type-utils" "8.53.0" - "@typescript-eslint/utils" "8.53.0" - "@typescript-eslint/visitor-keys" "8.53.0" + "@typescript-eslint/scope-manager" "8.53.1" + "@typescript-eslint/type-utils" "8.53.1" + "@typescript-eslint/utils" "8.53.1" + "@typescript-eslint/visitor-keys" "8.53.1" ignore "^7.0.5" natural-compare "^1.4.0" ts-api-utils "^2.4.0" -"@typescript-eslint/parser@8.53.0", "@typescript-eslint/parser@^8.52.0": 
- version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-8.53.0.tgz#d8bed6f12dc74e03751e5f947510ff2b165990c6" - integrity sha512-npiaib8XzbjtzS2N4HlqPvlpxpmZ14FjSJrteZpPxGUaYPlvhzlzUZ4mZyABo0EFrOWnvyd0Xxroq//hKhtAWg== +"@typescript-eslint/parser@8.53.1", "@typescript-eslint/parser@^8.52.0": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-8.53.1.tgz#58d4a70cc2daee2becf7d4521d65ea1782d6ec68" + integrity sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg== dependencies: - "@typescript-eslint/scope-manager" "8.53.0" - "@typescript-eslint/types" "8.53.0" - "@typescript-eslint/typescript-estree" "8.53.0" - "@typescript-eslint/visitor-keys" "8.53.0" + "@typescript-eslint/scope-manager" "8.53.1" + "@typescript-eslint/types" "8.53.1" + "@typescript-eslint/typescript-estree" "8.53.1" + "@typescript-eslint/visitor-keys" "8.53.1" debug "^4.4.3" -"@typescript-eslint/project-service@8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/project-service/-/project-service-8.53.0.tgz#327c67c61c16a1c8b12a440b0779b41eb77cc7df" - integrity sha512-Bl6Gdr7NqkqIP5yP9z1JU///Nmes4Eose6L1HwpuVHwScgDPPuEWbUVhvlZmb8hy0vX9syLk5EGNL700WcBlbg== +"@typescript-eslint/project-service@8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/project-service/-/project-service-8.53.1.tgz#4e47856a0b14a1ceb28b0294b4badef3be1e9734" + integrity sha512-WYC4FB5Ra0xidsmlPb+1SsnaSKPmS3gsjIARwbEkHkoWloQmuzcfypljaJcR78uyLA1h8sHdWWPHSLDI+MtNog== dependencies: - "@typescript-eslint/tsconfig-utils" "^8.53.0" - "@typescript-eslint/types" "^8.53.0" + "@typescript-eslint/tsconfig-utils" "^8.53.1" + "@typescript-eslint/types" "^8.53.1" debug "^4.4.3" -"@typescript-eslint/scope-manager@8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-8.53.0.tgz#f922fcbf0d42e72f065297af31779ccf19de9a97" - integrity sha512-kWNj3l01eOGSdVBnfAF2K1BTh06WS0Yet6JUgb9Cmkqaz3Jlu0fdVUjj9UI8gPidBWSMqDIglmEXifSgDT/D0g== +"@typescript-eslint/scope-manager@8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-8.53.1.tgz#6c4b8c82cd45ae3b365afc2373636e166743a8fa" + integrity sha512-Lu23yw1uJMFY8cUeq7JlrizAgeQvWugNQzJp8C3x8Eo5Jw5Q2ykMdiiTB9vBVOOUBysMzmRRmUfwFrZuI2C4SQ== dependencies: - "@typescript-eslint/types" "8.53.0" - "@typescript-eslint/visitor-keys" "8.53.0" + "@typescript-eslint/types" "8.53.1" + "@typescript-eslint/visitor-keys" "8.53.1" -"@typescript-eslint/tsconfig-utils@8.53.0", "@typescript-eslint/tsconfig-utils@^8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.0.tgz#105279d7969a7abdc8345cc9c57cff83cf910f8f" - integrity sha512-K6Sc0R5GIG6dNoPdOooQ+KtvT5KCKAvTcY8h2rIuul19vxH5OTQk7ArKkd4yTzkw66WnNY0kPPzzcmWA+XRmiA== +"@typescript-eslint/tsconfig-utils@8.53.1", "@typescript-eslint/tsconfig-utils@^8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.53.1.tgz#efe80b8d019cd49e5a1cf46c2eb0cd2733076424" + integrity sha512-qfvLXS6F6b1y43pnf0pPbXJ+YoXIC7HKg0UGZ27uMIemKMKA6XH2DTxsEDdpdN29D+vHV07x/pnlPNVLhdhWiA== -"@typescript-eslint/type-utils@8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-8.53.0.tgz#81a0de5c01fc68f6df0591d03cd8226bda01c91f" - 
integrity sha512-BBAUhlx7g4SmcLhn8cnbxoxtmS7hcq39xKCgiutL3oNx1TaIp+cny51s8ewnKMpVUKQUGb41RAUWZ9kxYdovuw== +"@typescript-eslint/type-utils@8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-8.53.1.tgz#95de2651a96d580bf5c6c6089ddd694284d558ad" + integrity sha512-MOrdtNvyhy0rHyv0ENzub1d4wQYKb2NmIqG7qEqPWFW7Mpy2jzFC3pQ2yKDvirZB7jypm5uGjF2Qqs6OIqu47w== dependencies: - "@typescript-eslint/types" "8.53.0" - "@typescript-eslint/typescript-estree" "8.53.0" - "@typescript-eslint/utils" "8.53.0" + "@typescript-eslint/types" "8.53.1" + "@typescript-eslint/typescript-estree" "8.53.1" + "@typescript-eslint/utils" "8.53.1" debug "^4.4.3" ts-api-utils "^2.4.0" -"@typescript-eslint/types@8.53.0", "@typescript-eslint/types@^8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.53.0.tgz#1adcad3fa32bc2c4cbf3785ba07a5e3151819efb" - integrity sha512-Bmh9KX31Vlxa13+PqPvt4RzKRN1XORYSLlAE+sO1i28NkisGbTtSLFVB3l7PWdHtR3E0mVMuC7JilWJ99m2HxQ== +"@typescript-eslint/types@8.53.1", "@typescript-eslint/types@^8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.53.1.tgz#101f203f0807a63216cceceedb815fabe21d5793" + integrity sha512-jr/swrr2aRmUAUjW5/zQHbMaui//vQlsZcJKijZf3M26bnmLj8LyZUpj8/Rd6uzaek06OWsqdofN/Thenm5O8A== -"@typescript-eslint/typescript-estree@8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.0.tgz#7805b46b7a8ce97e91b7bb56fc8b1ba26ca8ef52" - integrity sha512-pw0c0Gdo7Z4xOG987u3nJ8akL9093yEEKv8QTJ+Bhkghj1xyj8cgPaavlr9rq8h7+s6plUJ4QJYw2gCZodqmGw== +"@typescript-eslint/typescript-estree@8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-8.53.1.tgz#b6dce2303c9e27e95b8dcd8c325868fff53e488f" + integrity sha512-RGlVipGhQAG4GxV1s34O91cxQ/vWiHJTDHbXRr0li2q/BGg3RR/7NM8QDWgkEgrwQYCvmJV9ichIwyoKCQ+DTg== dependencies: - "@typescript-eslint/project-service" "8.53.0" - "@typescript-eslint/tsconfig-utils" "8.53.0" - "@typescript-eslint/types" "8.53.0" - "@typescript-eslint/visitor-keys" "8.53.0" + "@typescript-eslint/project-service" "8.53.1" + "@typescript-eslint/tsconfig-utils" "8.53.1" + "@typescript-eslint/types" "8.53.1" + "@typescript-eslint/visitor-keys" "8.53.1" debug "^4.4.3" minimatch "^9.0.5" semver "^7.7.3" tinyglobby "^0.2.15" ts-api-utils "^2.4.0" -"@typescript-eslint/utils@8.53.0": - version "8.53.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-8.53.0.tgz#bf0a4e2edaf1afc9abce209fc02f8cab0b74af13" - integrity sha512-XDY4mXTez3Z1iRDI5mbRhH4DFSt46oaIFsLg+Zn97+sYrXACziXSQcSelMybnVZ5pa1P6xYkPr5cMJyunM1ZDA== +"@typescript-eslint/utils@8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-8.53.1.tgz#81fe6c343de288701b774f4d078382f567e6edaa" + integrity sha512-c4bMvGVWW4hv6JmDUEG7fSYlWOl3II2I4ylt0NM+seinYQlZMQIaKaXIIVJWt9Ofh6whrpM+EdDQXKXjNovvrg== dependencies: "@eslint-community/eslint-utils" "^4.9.1" - "@typescript-eslint/scope-manager" "8.53.0" - "@typescript-eslint/types" "8.53.0" - "@typescript-eslint/typescript-estree" "8.53.0" + "@typescript-eslint/scope-manager" "8.53.1" + "@typescript-eslint/types" "8.53.1" + "@typescript-eslint/typescript-estree" "8.53.1" -"@typescript-eslint/visitor-keys@8.53.0": - version "8.53.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.0.tgz#9a785664ddae7e3f7e570ad8166e48dbc9c6cf02" - integrity sha512-LZ2NqIHFhvFwxG0qZeLL9DvdNAHPGCY5dIRwBhyYeU+LfLhcStE1ImjsuTG/WaVh3XysGaeLW8Rqq7cGkPCFvw== +"@typescript-eslint/visitor-keys@8.53.1": + version "8.53.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-8.53.1.tgz#405f04959be22b9be364939af8ac19c3649b6eb7" + integrity sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg== dependencies: - "@typescript-eslint/types" "8.53.0" + "@typescript-eslint/types" "8.53.1" eslint-visitor-keys "^4.2.1" "@ungap/structured-clone@^1.0.0": @@ -13464,15 +13464,15 @@ types-ramda@^0.30.1: dependencies: ts-toolbelt "^9.6.0" -typescript-eslint@^8.53.0: - version "8.53.0" - resolved "https://registry.yarnpkg.com/typescript-eslint/-/typescript-eslint-8.53.0.tgz#c35ca6403cd381753aee325f67e10d6101d55f04" - integrity sha512-xHURCQNxZ1dsWn0sdOaOfCSQG0HKeqSj9OexIxrz6ypU6wHYOdX2I3D2b8s8wFSsSOYJb+6q283cLiLlkEsBYw== +typescript-eslint@^8.53.1: + version "8.53.1" + resolved "https://registry.yarnpkg.com/typescript-eslint/-/typescript-eslint-8.53.1.tgz#e8d2888083af4638d2952b938d69458f54865921" + integrity sha512-gB+EVQfP5RDElh9ittfXlhZJdjSU4jUSTyE2+ia8CYyNvet4ElfaLlAIqDvQV9JPknKx0jQH1racTYe/4LaLSg== dependencies: - "@typescript-eslint/eslint-plugin" "8.53.0" - "@typescript-eslint/parser" "8.53.0" - "@typescript-eslint/typescript-estree" "8.53.0" - "@typescript-eslint/utils" "8.53.0" + "@typescript-eslint/eslint-plugin" "8.53.1" + "@typescript-eslint/parser" "8.53.1" + "@typescript-eslint/typescript-estree" "8.53.1" + "@typescript-eslint/utils" "8.53.1" typescript@~5.9.3: version "5.9.3" From 2c1a33fd32c0386ff8b72b664e0b37a0b8ab7554 Mon Sep 17 00:00:00 2001 From: Sam Firke Date: Wed, 21 Jan 2026 13:52:42 -0500 Subject: [PATCH 10/21] fix(roles): allow Public role to read themes (#37295) --- superset/security/manager.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/superset/security/manager.py b/superset/security/manager.py index 1a3532f262d5..57e541cbca69 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -433,8 +433,9 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods ("can_time_range", "Api"), ("can_query_form_data", "Api"), ("can_query", "Api"), - # CSS for dashboard styling + # CSS and themes for dashboard styling ("can_read", "CssTemplate"), + ("can_read", "Theme"), # Embedded dashboard support ("can_read", "EmbeddedDashboard"), # Datasource metadata for chart rendering From b460ca94c61f85c45c8d4e95491246b1a8bcba8c Mon Sep 17 00:00:00 2001 From: Evan Rusackas Date: Wed, 21 Jan 2026 10:54:01 -0800 Subject: [PATCH 11/21] feat(docs): auto-generate database documentation from lib.py (#36805) Co-authored-by: Claude Opus 4.5 --- .github/workflows/superset-docs-deploy.yml | 41 +- .github/workflows/superset-docs-verify.yml | 62 +- .../superset-python-integrationtest.yml | 30 + .pre-commit-config.yaml | 7 + .rat-excludes | 3 + AGENTS.md | 21 + README.md | 81 +- docs/.gitignore | 7 + docs/docs/configuration/databases.mdx | 2004 ------- docs/docusaurus.config.ts | 9 +- docs/package.json | 11 +- docs/scripts/generate-database-docs.mjs | 867 +++ docs/sidebars.js | 14 + docs/src/components/SectionHeader.tsx | 12 +- .../components/databases/DatabaseIndex.tsx | 578 ++ .../src/components/databases/DatabasePage.tsx | 634 +++ docs/src/components/databases/index.ts | 22 + 
docs/src/components/databases/types.ts | 243 + docs/src/data/databases.json | 4799 +++++++++++++++++ docs/src/pages/index.tsx | 52 +- docs/src/styles/custom.css | 5 + docs/static/img/databases/altinity.png | Bin 0 -> 19819 bytes docs/static/img/databases/amazon-redshift.jpg | Bin 16973 -> 0 bytes docs/static/img/databases/apache-druid.jpeg | Bin 214904 -> 0 bytes docs/static/img/databases/apache-impala.png | Bin 5216 -> 25804 bytes docs/static/img/databases/apache-solr.png | Bin 0 -> 5584 bytes docs/static/img/databases/apache-solr.svg | 40 + docs/static/img/databases/apache-spark.png | Bin 0 -> 26999 bytes docs/static/img/databases/ascend.webp | Bin 0 -> 35644 bytes docs/static/img/databases/aws-aurora.jpg | Bin 0 -> 80321 bytes docs/static/img/databases/aws.png | Bin 0 -> 7868 bytes docs/static/img/databases/azure.svg | 1 + docs/static/img/databases/celerdata.png | Bin 0 -> 41661 bytes docs/static/img/databases/cloudflare.png | Bin 0 -> 2841 bytes docs/static/img/databases/cockroachdb.png | Bin 0 -> 242795 bytes docs/static/img/databases/cratedb.png | Bin 0 -> 1534 bytes docs/static/img/databases/db2.png | Bin 7493 -> 0 bytes docs/static/img/databases/duckdb.png | Bin 0 -> 8369 bytes docs/static/img/databases/elasticsearch.png | Bin 0 -> 13770 bytes docs/static/img/databases/google-biquery.png | Bin 15539 -> 0 bytes docs/static/img/databases/greenplum.jpeg | Bin 7559 -> 0 bytes docs/static/img/databases/ibmdb2.png | Bin 14127 -> 0 bytes docs/static/img/databases/imply.png | Bin 0 -> 15978 bytes docs/static/img/databases/kusto.png | Bin 0 -> 2493 bytes docs/static/img/databases/monet.png | Bin 21830 -> 0 bytes docs/static/img/databases/motherduck.png | Bin 0 -> 82556 bytes docs/static/img/databases/mssql-server.png | Bin 695548 -> 0 bytes docs/static/img/databases/mssql.jpg | Bin 38434 -> 0 bytes docs/static/img/databases/mysql.jpg | Bin 42653 -> 0 bytes docs/static/img/databases/oracle-logo.png | Bin 10347 -> 0 bytes docs/static/img/databases/oracle.png | Bin 8231 -> 0 bytes docs/static/img/databases/pinot.png | Bin 7127 -> 0 bytes docs/static/img/databases/postgresql.jpg | Bin 19019 -> 0 bytes docs/static/img/databases/risingwave.png | Bin 0 -> 1548 bytes docs/static/img/databases/sap-hana.jpg | Bin 20343 -> 0 bytes docs/static/img/databases/shillelagh.png | Bin 0 -> 113600 bytes docs/static/img/databases/singlestore.png | Bin 0 -> 15978 bytes docs/static/img/databases/snowflake.png | Bin 15635 -> 0 bytes docs/static/img/databases/sqlite.jpg | Bin 13006 -> 0 bytes docs/static/img/databases/starburst.png | Bin 0 -> 20343 bytes docs/static/img/databases/superset.svg | 43 + docs/static/img/databases/trino2.jpg | Bin 36149 -> 0 bytes docs/tsconfig.json | 2 + docs/versions-config.json | 4 +- pyproject.toml | 1 + superset/db_engine_specs/METADATA_STATUS.md | 153 + superset/db_engine_specs/README.md | 154 + superset/db_engine_specs/arc.py | 80 + superset/db_engine_specs/ascend.py | 19 + superset/db_engine_specs/athena.py | 60 +- superset/db_engine_specs/aurora.py | 12 + superset/db_engine_specs/base.py | 134 + superset/db_engine_specs/bigquery.py | 53 +- superset/db_engine_specs/clickhouse.py | 118 +- superset/db_engine_specs/cockroachdb.py | 17 + superset/db_engine_specs/couchbase.py | 30 + superset/db_engine_specs/crate.py | 26 +- superset/db_engine_specs/d1.py | 51 + superset/db_engine_specs/databend.py | 39 +- superset/db_engine_specs/databricks.py | 96 +- superset/db_engine_specs/db2.py | 57 +- superset/db_engine_specs/denodo.py | 36 +- superset/db_engine_specs/doris.py | 27 + 
superset/db_engine_specs/dremio.py | 36 +- superset/db_engine_specs/drill.py | 57 +- superset/db_engine_specs/druid.py | 74 +- superset/db_engine_specs/duckdb.py | 84 +- superset/db_engine_specs/dynamodb.py | 27 +- superset/db_engine_specs/elasticsearch.py | 91 +- superset/db_engine_specs/exasol.py | 47 +- superset/db_engine_specs/firebird.py | 22 +- superset/db_engine_specs/firebolt.py | 39 +- superset/db_engine_specs/greenplum.py | 55 + superset/db_engine_specs/gsheets.py | 17 + superset/db_engine_specs/hana.py | 18 + superset/db_engine_specs/hive.py | 18 +- superset/db_engine_specs/hologres.py | 60 + superset/db_engine_specs/ibmi.py | 6 + superset/db_engine_specs/impala.py | 19 +- superset/db_engine_specs/kusto.py | 66 +- superset/db_engine_specs/kylin.py | 19 +- superset/db_engine_specs/lib.py | 167 +- superset/db_engine_specs/lint_metadata.py | 705 +++ superset/db_engine_specs/mariadb.py | 15 + superset/db_engine_specs/monetdb.py | 75 + superset/db_engine_specs/mssql.py | 56 +- superset/db_engine_specs/mysql.py | 104 +- superset/db_engine_specs/netezza.py | 14 + superset/db_engine_specs/oceanbase.py | 13 + superset/db_engine_specs/ocient.py | 13 +- superset/db_engine_specs/oracle.py | 17 +- superset/db_engine_specs/parseable.py | 27 +- superset/db_engine_specs/pinot.py | 36 +- superset/db_engine_specs/postgres.py | 143 +- superset/db_engine_specs/presto.py | 26 +- superset/db_engine_specs/redshift.py | 82 +- superset/db_engine_specs/risingwave.py | 17 + superset/db_engine_specs/shillelagh.py | 17 + superset/db_engine_specs/singlestore.py | 36 + superset/db_engine_specs/snowflake.py | 58 +- superset/db_engine_specs/solr.py | 19 +- superset/db_engine_specs/spark.py | 15 + superset/db_engine_specs/sqlite.py | 15 +- superset/db_engine_specs/starrocks.py | 85 + superset/db_engine_specs/superset.py | 17 + superset/db_engine_specs/sybase.py | 54 + superset/db_engine_specs/tdengine.py | 19 +- superset/db_engine_specs/teradata.py | 31 +- superset/db_engine_specs/timescaledb.py | 62 + superset/db_engine_specs/trino.py | 83 +- superset/db_engine_specs/vertica.py | 23 + superset/db_engine_specs/ydb.py | 47 +- superset/db_engine_specs/yugabytedb.py | 53 + 133 files changed, 11531 insertions(+), 2123 deletions(-) delete mode 100644 docs/docs/configuration/databases.mdx create mode 100644 docs/scripts/generate-database-docs.mjs create mode 100644 docs/src/components/databases/DatabaseIndex.tsx create mode 100644 docs/src/components/databases/DatabasePage.tsx create mode 100644 docs/src/components/databases/index.ts create mode 100644 docs/src/components/databases/types.ts create mode 100644 docs/src/data/databases.json create mode 100644 docs/static/img/databases/altinity.png delete mode 100644 docs/static/img/databases/amazon-redshift.jpg delete mode 100644 docs/static/img/databases/apache-druid.jpeg create mode 100644 docs/static/img/databases/apache-solr.png create mode 100644 docs/static/img/databases/apache-solr.svg create mode 100644 docs/static/img/databases/apache-spark.png create mode 100644 docs/static/img/databases/ascend.webp create mode 100644 docs/static/img/databases/aws-aurora.jpg create mode 100644 docs/static/img/databases/aws.png create mode 100644 docs/static/img/databases/azure.svg create mode 100644 docs/static/img/databases/celerdata.png create mode 100644 docs/static/img/databases/cloudflare.png create mode 100644 docs/static/img/databases/cockroachdb.png create mode 100644 docs/static/img/databases/cratedb.png delete mode 100644 docs/static/img/databases/db2.png create mode 
100644 docs/static/img/databases/duckdb.png create mode 100644 docs/static/img/databases/elasticsearch.png delete mode 100644 docs/static/img/databases/google-biquery.png delete mode 100644 docs/static/img/databases/greenplum.jpeg delete mode 100644 docs/static/img/databases/ibmdb2.png create mode 100644 docs/static/img/databases/imply.png create mode 100644 docs/static/img/databases/kusto.png delete mode 100644 docs/static/img/databases/monet.png create mode 100644 docs/static/img/databases/motherduck.png delete mode 100644 docs/static/img/databases/mssql-server.png delete mode 100644 docs/static/img/databases/mssql.jpg delete mode 100644 docs/static/img/databases/mysql.jpg delete mode 100644 docs/static/img/databases/oracle-logo.png delete mode 100644 docs/static/img/databases/oracle.png delete mode 100644 docs/static/img/databases/pinot.png delete mode 100644 docs/static/img/databases/postgresql.jpg create mode 100644 docs/static/img/databases/risingwave.png delete mode 100644 docs/static/img/databases/sap-hana.jpg create mode 100644 docs/static/img/databases/shillelagh.png create mode 100644 docs/static/img/databases/singlestore.png delete mode 100644 docs/static/img/databases/snowflake.png delete mode 100644 docs/static/img/databases/sqlite.jpg create mode 100644 docs/static/img/databases/starburst.png create mode 100644 docs/static/img/databases/superset.svg delete mode 100644 docs/static/img/databases/trino2.jpg create mode 100644 superset/db_engine_specs/METADATA_STATUS.md create mode 100644 superset/db_engine_specs/arc.py create mode 100644 superset/db_engine_specs/d1.py create mode 100644 superset/db_engine_specs/greenplum.py create mode 100644 superset/db_engine_specs/hologres.py create mode 100644 superset/db_engine_specs/lint_metadata.py create mode 100644 superset/db_engine_specs/monetdb.py create mode 100644 superset/db_engine_specs/sybase.py create mode 100644 superset/db_engine_specs/timescaledb.py create mode 100644 superset/db_engine_specs/yugabytedb.py diff --git a/.github/workflows/superset-docs-deploy.yml b/.github/workflows/superset-docs-deploy.yml index a391d2ae5b11..3371b398fdc7 100644 --- a/.github/workflows/superset-docs-deploy.yml +++ b/.github/workflows/superset-docs-deploy.yml @@ -1,6 +1,13 @@ name: Docs Deployment on: + # Deploy after integration tests complete on master + workflow_run: + workflows: ["Python-Integration"] + types: [completed] + branches: [master] + + # Also allow manual trigger and direct pushes to docs push: paths: - "docs/**" @@ -30,9 +37,10 @@ jobs: name: Build & Deploy runs-on: ubuntu-24.04 steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + - name: "Checkout ${{ github.event.workflow_run.head_sha || github.sha }}" uses: actions/checkout@v6 with: + ref: ${{ github.event.workflow_run.head_sha || github.sha }} persist-credentials: false submodules: recursive - name: Set up Node.js @@ -58,6 +66,35 @@ jobs: working-directory: docs run: | yarn install --check-cache + - name: Download database diagnostics (if triggered by integration tests) + if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success' + uses: dawidd6/action-download-artifact@v6 + continue-on-error: true + with: + workflow: superset-python-integrationtest.yml + run_id: ${{ github.event.workflow_run.id }} + name: database-diagnostics + path: docs/src/data/ + - name: Try to download latest diagnostics (for push/dispatch triggers) + if: github.event_name != 'workflow_run' + uses: dawidd6/action-download-artifact@v6 + 
continue-on-error: true + with: + workflow: superset-python-integrationtest.yml + name: database-diagnostics + path: docs/src/data/ + branch: master + search_artifacts: true + if_no_artifact_found: warn + - name: Use diagnostics artifact if available + working-directory: docs + run: | + if [ -f "src/data/databases-diagnostics.json" ]; then + echo "Using fresh diagnostics from integration tests" + mv src/data/databases-diagnostics.json src/data/databases.json + else + echo "Using committed databases.json (no artifact found)" + fi - name: yarn build working-directory: docs run: | @@ -71,5 +108,5 @@ jobs: destination-github-username: "apache" destination-repository-name: "superset-site" target-branch: "asf-site" - commit-message: "deploying docs: ${{ github.event.head_commit.message }} (apache/superset@${{ github.sha }})" + commit-message: "deploying docs: ${{ github.event.head_commit.message || 'triggered by integration tests' }} (apache/superset@${{ github.event.workflow_run.head_sha || github.sha }})" user-email: dev@superset.apache.org diff --git a/.github/workflows/superset-docs-verify.yml b/.github/workflows/superset-docs-verify.yml index 968b7cedb683..90e8abc40b33 100644 --- a/.github/workflows/superset-docs-verify.yml +++ b/.github/workflows/superset-docs-verify.yml @@ -4,17 +4,23 @@ on: pull_request: paths: - "docs/**" + - "superset/db_engine_specs/**" - ".github/workflows/superset-docs-verify.yml" types: [synchronize, opened, reopened, ready_for_review] + workflow_run: + workflows: ["Python-Integration"] + types: [completed] # cancel previous workflow jobs for PRs concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.event.workflow_run.head_sha || github.run_id }} cancel-in-progress: true jobs: linkinator: # See docs here: https://github.com/marketplace/actions/linkinator + # Only run on pull_request, not workflow_run + if: github.event_name == 'pull_request' name: Link Checking runs-on: ubuntu-latest steps: @@ -50,8 +56,11 @@ jobs: https://timbr.ai/, https://opensource.org/license/apache-2-0, https://www.plaidcloud.com/ - build-deploy: - name: Build & Deploy + + build-on-pr: + # Build docs when PR changes docs/** (uses committed databases.json) + if: github.event_name == 'pull_request' + name: Build (PR trigger) runs-on: ubuntu-24.04 defaults: run: @@ -75,3 +84,50 @@ jobs: - name: yarn build run: | yarn build + + build-after-tests: + # Build docs after integration tests complete (uses fresh diagnostics) + # Only runs if integration tests succeeded + if: > + github.event_name == 'workflow_run' && + github.event.workflow_run.conclusion == 'success' + name: Build (after integration tests) + runs-on: ubuntu-24.04 + defaults: + run: + working-directory: docs + steps: + - name: "Checkout PR head: ${{ github.event.workflow_run.head_sha }}" + uses: actions/checkout@v6 + with: + ref: ${{ github.event.workflow_run.head_sha }} + persist-credentials: false + submodules: recursive + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version-file: './docs/.nvmrc' + - name: yarn install + run: | + yarn install --check-cache + - name: Download database diagnostics from integration tests + uses: dawidd6/action-download-artifact@v6 + with: + workflow: superset-python-integrationtest.yml + run_id: ${{ github.event.workflow_run.id }} + name: database-diagnostics + path: docs/src/data/ + - name: Use fresh diagnostics + run: | + if [ -f 
"src/data/databases-diagnostics.json" ]; then + echo "Using fresh diagnostics from integration tests" + mv src/data/databases-diagnostics.json src/data/databases.json + else + echo "Warning: No diagnostics artifact found, using committed data" + fi + - name: yarn typecheck + run: | + yarn typecheck + - name: yarn build + run: | + yarn build diff --git a/.github/workflows/superset-python-integrationtest.yml b/.github/workflows/superset-python-integrationtest.yml index 5ccb2d3bf85c..6de1cf0f31ea 100644 --- a/.github/workflows/superset-python-integrationtest.yml +++ b/.github/workflows/superset-python-integrationtest.yml @@ -73,6 +73,36 @@ jobs: flags: python,mysql token: ${{ secrets.CODECOV_TOKEN }} verbose: true + - name: Generate database diagnostics for docs + if: steps.check.outputs.python + env: + SUPERSET_CONFIG: tests.integration_tests.superset_test_config + SUPERSET__SQLALCHEMY_DATABASE_URI: | + mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true + run: | + python -c " + import json + from superset.app import create_app + from superset.db_engine_specs.lib import generate_yaml_docs + app = create_app() + with app.app_context(): + docs = generate_yaml_docs() + # Wrap in the expected format + output = { + 'generated': '$(date -Iseconds)', + 'databases': docs + } + with open('databases-diagnostics.json', 'w') as f: + json.dump(output, f, indent=2, default=str) + print(f'Generated diagnostics for {len(docs)} databases') + " + - name: Upload database diagnostics artifact + if: steps.check.outputs.python + uses: actions/upload-artifact@v4 + with: + name: database-diagnostics + path: databases-diagnostics.json + retention-days: 7 test-postgres: runs-on: ubuntu-24.04 strategy: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0ca94fca54fe..0839bc972962 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -142,3 +142,10 @@ repos: else echo "No Python files to lint." fi + - id: db-engine-spec-metadata + name: database engine spec metadata validation + entry: python superset/db_engine_specs/lint_metadata.py --strict + language: system + files: ^superset/db_engine_specs/.*\.py$ + exclude: ^superset/db_engine_specs/(base|lib|lint_metadata|__init__)\.py$ + pass_filenames: false diff --git a/.rat-excludes b/.rat-excludes index afe8329057c4..78db582935e2 100644 --- a/.rat-excludes +++ b/.rat-excludes @@ -75,6 +75,9 @@ postgresql.svg snowflake.svg ydb.svg loading.svg +apache-solr.svg +azure.svg +superset.svg # docs third-party logos, i.e. docs/static/img/logos/* logos/* diff --git a/AGENTS.md b/AGENTS.md index 1b676b0157f1..6e1efb4a1bd8 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -2,6 +2,27 @@ Apache Superset is a data visualization platform with Flask/Python backend and React/TypeScript frontend. +## ⚠️ CRITICAL: Always Run Pre-commit Before Pushing + +**ALWAYS run `pre-commit run --all-files` before pushing commits.** CI will fail if pre-commit checks don't pass. This is non-negotiable. + +```bash +# Stage your changes first +git add . + +# Run pre-commit on all files +pre-commit run --all-files + +# If there are auto-fixes, stage them and commit +git add . 
+git commit --amend # or new commit +``` + +Common pre-commit failures: +- **Formatting** - black, prettier, eslint will auto-fix +- **Type errors** - mypy failures need manual fixes +- **Linting** - ruff, pylint issues need manual fixes + ## ⚠️ CRITICAL: Ongoing Refactors (What NOT to Do) **These migrations are actively happening - avoid deprecated patterns:** diff --git a/README.md b/README.md index f7d59ee6915b..261813bd260c 100644 --- a/README.md +++ b/README.md @@ -101,51 +101,54 @@ Superset provides: ## Supported Databases -Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/configuration/databases)) that has a Python DB-API driver and a SQLAlchemy dialect. +Superset can query data from any SQL-speaking datastore or data engine (Presto, Trino, Athena, [and more](https://superset.apache.org/docs/databases)) that has a Python DB-API driver and a SQLAlchemy dialect. Here are some of the major database solutions that are supported: +

-  [old README logo grid removed: redshift, google-bigquery, snowflake, trino, presto, databricks, druid, firebolt, timescale, postgresql, mysql, mssql-server, db2, sqlite, sybase, mariadb, vertica, oracle, firebird, greenplum, clickhouse, exasol, monet-db, apache-kylin, hologres, netezza, pinot, teradata, yugabyte, databend, starrocks, doris, oceanbase, sap-hana, denodo, ydb, TDengine]
+  [new logo grid of `<img>` entries added; the image markup itself is not recoverable here]
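As a minimal sketch of what the "Python DB-API driver and a SQLAlchemy dialect" requirement above means in practice (the hostname and catalog below are placeholders, not a real cluster; `pip install trino` is one example of such a driver package):

```python
# Once the driver package for a given engine is installed (e.g. `pip install trino`),
# SQLAlchemy can reach it through a dialect-prefixed URI -- the same URI Superset asks
# for when you register the database. Host and catalog here are hypothetical.
from sqlalchemy import create_engine, text

engine = create_engine("trino://analyst@trino.example.internal:8080/hive")
with engine.connect() as conn:
    # A trivial round-trip query to confirm the dialect + driver pair works.
    print(conn.execute(text("SELECT 1")).scalar())
```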

+ -**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/configuration/databases). +**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/databases). Want to add support for your datastore or data engine? Read more [here](https://superset.apache.org/docs/frequently-asked-questions#does-superset-work-with-insert-database-engine-here) about the technical requirements. diff --git a/docs/.gitignore b/docs/.gitignore index 6880a0863d92..90904a9ff469 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -26,3 +26,10 @@ docs/intro.md # Generated badge images (downloaded at build time by remark-localize-badges plugin) static/badges/ + +# Generated database documentation MDX files (regenerated at build time) +# Source of truth is in superset/db_engine_specs/*.py metadata attributes +docs/databases/ + +# Note: src/data/databases.json is COMMITTED (not ignored) to preserve feature diagnostics +# that require Flask context to generate. Update it locally with: npm run gen-db-docs diff --git a/docs/docs/configuration/databases.mdx b/docs/docs/configuration/databases.mdx deleted file mode 100644 index 98cd9c58a223..000000000000 --- a/docs/docs/configuration/databases.mdx +++ /dev/null @@ -1,2004 +0,0 @@ ---- -title: Connecting to Databases -hide_title: true -sidebar_position: 1 -version: 1 ---- -# Connecting to Databases - -Superset does not ship bundled with connectivity to databases. The main step in connecting -Superset to a database is to **install the proper database driver(s)** -in your environment. - -:::note -You’ll need to install the required packages for the database you want to use as your metadata database -as well as the packages needed to connect to the databases you want to access through Superset. -For information about setting up Superset's metadata database, please refer to -installation documentations ([Docker Compose](/docs/installation/docker-compose), [Kubernetes](/docs/installation/kubernetes)) -::: - -This documentation tries to keep pointer to the different drivers for commonly used database -engine. - -## Installing Database Drivers - -Superset requires a Python [DB-API database driver](https://peps.python.org/pep-0249/) -and a [SQLAlchemy dialect](https://docs.sqlalchemy.org/en/20/dialects/) to be installed for -each database engine you want to connect to. - -You can read more [here](/docs/configuration/databases#installing-drivers-in-docker-images) about how to -install new database drivers into your Superset configuration. - -### Supported Databases and Dependencies - -Some of the recommended packages are shown below. Please refer to -[pyproject.toml](https://github.com/apache/superset/blob/master/pyproject.toml) for the versions that -are compatible with Superset. - -|
Database
| PyPI package | Connection String | -| --------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ | -| [AWS Athena](/docs/configuration/databases#aws-athena) | `pip install pyathena[pandas]` , `pip install PyAthenaJDBC` | `awsathena+rest://{access_key_id}:{access_key}@athena.{region}.amazonaws.com/{schema}?s3_staging_dir={s3_staging_dir}&...` | -| [AWS DynamoDB](/docs/configuration/databases#aws-dynamodb) | `pip install pydynamodb` | `dynamodb://{access_key_id}:{secret_access_key}@dynamodb.{region_name}.amazonaws.com?connector=superset` | -| [AWS Redshift](/docs/configuration/databases#aws-redshift) | `pip install sqlalchemy-redshift` | `redshift+psycopg2://:@:5439/` | -| [Apache Doris](/docs/configuration/databases#apache-doris) | `pip install pydoris` | `doris://:@:/.` | -| [Apache Drill](/docs/configuration/databases#apache-drill) | `pip install sqlalchemy-drill` | `drill+sadrill://:@:/`, often useful: `?use_ssl=True/False` | -| [Apache Druid](/docs/configuration/databases#apache-druid) | `pip install pydruid` | `druid://:@:/druid/v2/sql` | -| [Apache Hive](/docs/configuration/databases#hive) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | -| [Apache Impala](/docs/configuration/databases#apache-impala) | `pip install impyla` | `impala://{hostname}:{port}/{database}` | -| [Apache Kylin](/docs/configuration/databases#apache-kylin) | `pip install kylinpy` | `kylin://:@:/?=&=` | -| [Apache Pinot](/docs/configuration/databases#apache-pinot) | `pip install pinotdb` | `pinot://BROKER:5436/query?server=http://CONTROLLER:5983/` | -| [Apache Solr](/docs/configuration/databases#apache-solr) | `pip install sqlalchemy-solr` | `solr://{username}:{password}@{hostname}:{port}/{server_path}/{collection}` | -| [Apache Spark SQL](/docs/configuration/databases#apache-spark-sql) | `pip install pyhive` | `hive://hive@{hostname}:{port}/{database}` | -| [Arc - Apache Arrow](/docs/configuration/databases#arc-arrow) | `pip install arc-superset-arrow` | `arc+arrow://{api_key}@{hostname}:{port}/{database}` | -| [Arc - JSON](/docs/configuration/databases#arc-json) | `pip install arc-superset-dialect` | `arc+json://{api_key}@{hostname}:{port}/{database}` | -| [Ascend.io](/docs/configuration/databases#ascendio) | `pip install impyla` | `ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true` | -| [Azure MS SQL](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` | -| [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` | -| [Cloudflare D1](/docs/configuration/databases#cloudflare-d1) | `pip install superset-engine-d1` or `pip install apache_superset[d1]` | `d1://{cloudflare_account_id}:{cloudflare_api_token}@{cloudflare_d1_database_id}` | -| [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` | -| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` | -| 
[CrateDB](/docs/configuration/databases#cratedb) | `pip install sqlalchemy-cratedb` | `crate://{username}:{password}@{hostname}:{port}`, often useful: `?ssl=true/false` or `?schema=testdrive`. | -| [Denodo](/docs/configuration/databases#denodo) | `pip install denodo-sqlalchemy` | `denodo://{username}:{password}@{hostname}:{port}/{database}` | -| [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` |`dremio+flight://{username}:{password}@{host}:32010`, often useful: `?UseEncryption=true/false`. For Legacy ODBC: `dremio+pyodbc://{username}:{password}@{host}:31010` | -| [Elasticsearch](/docs/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` | -| [Exasol](/docs/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` | -| [Google BigQuery](/docs/configuration/databases#google-bigquery) | `pip install sqlalchemy-bigquery` | `bigquery://{project_id}` | -| [Google Sheets](/docs/configuration/databases#google-sheets) | `pip install shillelagh[gsheetsapi]` | `gsheets://` | -| [Firebolt](/docs/configuration/databases#firebolt) | `pip install firebolt-sqlalchemy` | `firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name}` | -| [Hologres](/docs/configuration/databases#hologres) | `pip install psycopg2` | `postgresql+psycopg2://:@/` | -| [IBM Db2](/docs/configuration/databases#ibm-db2) | `pip install ibm_db_sa` | `db2+ibm_db://` | -| [IBM Netezza Performance Server](/docs/configuration/databases#ibm-netezza-performance-server) | `pip install nzalchemy` | `netezza+nzpy://:@/` | -| [MySQL](/docs/configuration/databases#mysql) | `pip install mysqlclient` | `mysql://:@/` | -| [OceanBase](/docs/configuration/databases#oceanbase) | `pip install oceanbase_py` | `oceanbase://:@/` | -| [Oracle](/docs/configuration/databases#oracle) | `pip install oracledb` | `oracle://:@:` | -| [Parseable](/docs/configuration/databases#parseable) | `pip install sqlalchemy-parseable` | `parseable://:@/` | -| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://:@/` | -| [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://{username}:{password}@{hostname}:{port}/{database}` | -| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` | -| [SingleStore](/docs/configuration/databases#singlestore) | `pip install sqlalchemy-singlestoredb` | `singlestoredb://{username}:{password}@{host}:{port}/{database}` | -| [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://:@:/.` | -| [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | -| SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` | -| [SQL Server](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://:@:/` | -| [TDengine](/docs/configuration/databases#tdengine) | `pip install taospy` `pip install taos-ws-py` | `taosws://:@:` | -| [Teradata](/docs/configuration/databases#teradata) | `pip install teradatasqlalchemy` | `teradatasql://{user}:{password}@{host}` | -| 
[TimescaleDB](/docs/configuration/databases#timescaledb) | `pip install psycopg2` | `postgresql://:@:/` | -| [Trino](/docs/configuration/databases#trino) | `pip install trino` | `trino://{username}:{password}@{hostname}:{port}/{catalog}` | -| [Vertica](/docs/configuration/databases#vertica) | `pip install sqlalchemy-vertica-python` | `vertica+vertica_python://:@/` | -| [YDB](/docs/configuration/databases#ydb) | `pip install ydb-sqlalchemy` | `ydb://{host}:{port}/{database_name}` | -| [YugabyteDB](/docs/configuration/databases#yugabytedb) | `pip install psycopg2` | `postgresql://:@/` | - ---- - -Note that many other databases are supported, the main criteria being the existence of a functional -SQLAlchemy dialect and Python driver. Searching for the keyword "sqlalchemy + (database name)" -should help get you to the right place. - -If your database or data engine isn't on the list but a SQL interface -exists, please file an issue on the -[Superset GitHub repo](https://github.com/apache/superset/issues), so we can work on documenting and -supporting it. - -:::resources -- [Tutorial: Building a Database Connector for Superset](https://preset.io/blog/building-database-connector/) -::: - -### Installing Drivers in Docker Images - -Superset requires a Python database driver to be installed for each additional -type of database you want to connect to. - -In this example, we'll walk through how to install the MySQL connector library. -The connector library installation process is the same for all additional libraries. - -#### 1. Determine the driver you need - -Consult the [list of database drivers](/docs/configuration/databases) -and find the PyPI package needed to connect to your database. In this example, we're connecting -to a MySQL database, so we'll need the `mysqlclient` connector library. - -#### 2. Install the driver in the container - -We need to get the `mysqlclient` library installed into the Superset docker container -(it doesn't matter if it's installed on the host machine). We could enter the running -container with `docker exec -it bash` and run `pip install mysqlclient` -there, but that wouldn't persist permanently. - -To address this, the Superset `docker compose` deployment uses the convention -of a `requirements-local.txt` file. All packages listed in this file will be installed -into the container from PyPI at runtime. This file will be ignored by Git for -the purposes of local development. - -Create the file `requirements-local.txt` in a subdirectory called `docker` that -exists in the directory with your `docker-compose.yml` or `docker-compose-non-dev.yml` file. - -```bash -# Run from the repo root: -touch ./docker/requirements-local.txt -``` - -Add the driver identified in step above. You can use a text editor or do -it from the command line like: - -```bash -echo "mysqlclient" >> ./docker/requirements-local.txt -``` - -**If you are running a stock (non-customized) Superset image**, you are done. -Launch Superset with `docker compose -f docker-compose-non-dev.yml up` and -the driver should be present. - -You can check its presence by entering the running container with -`docker exec -it bash` and running `pip freeze`. The PyPI package should -be present in the printed list. - -**If you're running a customized docker image**, rebuild your local image with the new -driver baked in: - -```bash -docker compose build --force-rm -``` - -After the rebuild of the Docker images is complete, relaunch Superset by -running `docker compose up`. - -#### 3. 
Connect to MySQL - -Now that you've got a MySQL driver installed in your container, you should be able to connect -to your database via the Superset web UI. - -As an admin user, go to Settings -> Data: Database Connections and click the +DATABASE button. -From there, follow the steps on the -[Using Database Connection UI page](/docs/configuration/databases#connecting-through-the-ui). - -Consult the page for your specific database type in the Superset documentation to determine -the connection string and any other parameters you need to input. For instance, -on the [MySQL page](/docs/configuration/databases#mysql), we see that the connection string -to a local MySQL database differs depending on whether the setup is running on Linux or Mac. - -Click the “Test Connection” button, which should result in a popup message saying, -"Connection looks good!". - -#### 4. Troubleshooting - -If the test fails, review your docker logs for error messages. Superset uses SQLAlchemy -to connect to databases; to troubleshoot the connection string for your database, you might -start Python in the Superset application container or host environment and try to connect -directly to the desired database and fetch data. This eliminates Superset for the -purposes of isolating the problem. - -Repeat this process for each type of database you want Superset to connect to. - -### Database-specific Instructions - -#### Ascend.io - -The recommended connector library to Ascend.io is [impyla](https://github.com/cloudera/impyla). - -The expected connection string is formatted as follows: - -``` -ascend://{username}:{password}@{hostname}:{port}/{database}?auth_mechanism=PLAIN;use_ssl=true -``` - -#### Apache Doris - -The [sqlalchemy-doris](https://pypi.org/project/pydoris/) library is the recommended way to connect to Apache Doris through SQLAlchemy. - -You'll need the following setting values to form the connection string: - -- **User**: User Name -- **Password**: Password -- **Host**: Doris FE Host -- **Port**: Doris FE port -- **Catalog**: Catalog Name -- **Database**: Database Name - -Here's what the connection string looks like: - -``` -doris://:@:/. -``` - -:::resources -- [Apache Doris Docs: Superset Integration](https://doris.apache.org/docs/ecosystem/bi/apache-superset/) -::: - -#### AWS Athena - -##### PyAthenaJDBC - -[PyAthenaJDBC](https://pypi.org/project/PyAthenaJDBC/) is a Python DB 2.0 compliant wrapper for the -[Amazon Athena JDBC driver](https://docs.aws.amazon.com/athena/latest/ug/connect-with-jdbc.html). - -The connection string for Amazon Athena is as follows: - -``` -awsathena+jdbc://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{schema_name}?s3_staging_dir={s3_staging_dir}&... -``` - -Note that you'll need to escape & encode when forming the connection string like so: - -``` -s3://... -> s3%3A//... -``` - -##### PyAthena - -You can also use the [PyAthena library](https://pypi.org/project/PyAthena/) (no Java required) with the -following connection string: - -``` -awsathena+rest://{aws_access_key_id}:{aws_secret_access_key}@athena.{region_name}.amazonaws.com/{schema_name}?s3_staging_dir={s3_staging_dir}&... -``` - -The PyAthena library also allows to assume a specific IAM role which you can define by adding following parameters in Superset's Athena database connection UI under ADVANCED --> Other --> ENGINE PARAMETERS. 
- -```json -{ - "connect_args": { - "role_arn": "" - } -} -``` - -#### AWS DynamoDB - -##### PyDynamoDB - -[PyDynamoDB](https://pypi.org/project/PyDynamoDB/) is a Python DB API 2.0 (PEP 249) client for Amazon DynamoDB. - -The connection string for Amazon DynamoDB is as follows: - -``` -dynamodb://{aws_access_key_id}:{aws_secret_access_key}@dynamodb.{region_name}.amazonaws.com:443?connector=superset -``` - -To get more documentation, please visit: [PyDynamoDB WIKI](https://github.com/passren/PyDynamoDB/wiki/5.-Superset). - -#### AWS Redshift - -The [sqlalchemy-redshift](https://pypi.org/project/sqlalchemy-redshift/) library is the recommended -way to connect to Redshift through SQLAlchemy. - -This dialect requires either [redshift_connector](https://pypi.org/project/redshift-connector/) or [psycopg2](https://pypi.org/project/psycopg2/) to work properly. - -You'll need to set the following values to form the connection string: - -- **User Name**: userName -- **Password**: DBPassword -- **Database Host**: AWS Endpoint -- **Database Name**: Database Name -- **Port**: default 5439 - -##### psycopg2 - -Here's what the SQLALCHEMY URI looks like: - -``` -redshift+psycopg2://:@:5439/ -``` - -##### redshift_connector - -Here's what the SQLALCHEMY URI looks like: - -``` -redshift+redshift_connector://:@:5439/ -``` - -###### Using IAM-based credentials with Redshift cluster - -[Amazon redshift cluster](https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html) also supports generating temporary IAM-based database user credentials. - -Your superset app's [IAM role should have permissions](https://docs.aws.amazon.com/redshift/latest/mgmt/generating-iam-credentials-role-permissions.html) to call the `redshift:GetClusterCredentials` operation. - -You have to define the following arguments in Superset's redshift database connection UI under ADVANCED --> Others --> ENGINE PARAMETERS. - -``` -{"connect_args":{"iam":true,"database":"","cluster_identifier":"","db_user":""}} -``` - -and SQLALCHEMY URI should be set to `redshift+redshift_connector://` - -###### Using IAM-based credentials with Redshift serverless - -[Redshift serverless](https://docs.aws.amazon.com/redshift/latest/mgmt/serverless-whatis.html) supports connection using IAM roles. - -Your superset app's IAM role should have `redshift-serverless:GetCredentials` and `redshift-serverless:GetWorkgroup` permissions on Redshift serverless workgroup. - -You have to define the following arguments in Superset's redshift database connection UI under ADVANCED --> Others --> ENGINE PARAMETERS. - -``` -{"connect_args":{"iam":true,"is_serverless":true,"serverless_acct_id":"","serverless_work_group":"","database":"","user":"IAMR:"}} -``` - -#### ClickHouse - -To use ClickHouse with Superset, you will need to install the `clickhouse-connect` Python library: - -If running Superset using Docker Compose, add the following to your `./docker/requirements-local.txt` file: - -``` -clickhouse-connect>=0.6.8 -``` - -The recommended connector library for ClickHouse is -[clickhouse-connect](https://github.com/ClickHouse/clickhouse-connect). 
- -The expected connection string is formatted as follows: - -``` -clickhousedb://:@:/[?options…]clickhouse://{username}:{password}@{hostname}:{port}/{database} -``` - -Here's a concrete example of a real connection string: - -``` -clickhousedb://demo:demo@github.demo.trial.altinity.cloud/default?secure=true -``` - -If you're using Clickhouse locally on your computer, you can get away with using a http protocol URL that -uses the default user without a password (and doesn't encrypt the connection): - -``` -clickhousedb://localhost/default -``` - -:::resources -- [ClickHouse Docs: Superset Integration](https://clickhouse.com/docs/integrations/superset) -- [Altinity: Connect ClickHouse to Superset](https://docs.altinity.com/integrations/clickhouse-and-superset/connect-clickhouse-to-superset/) -- [Instaclustr: Connecting to ClickHouse from Superset](https://www.instaclustr.com/support/documentation/clickhouse/using-a-clickhouse-cluster/connecting-to-clickhouse-from-apache-superset/) -- [Blog: ClickHouse and Apache Superset](https://preset.io/blog/2021-5-26-clickhouse-superset/) -::: - -#### Cloudflare D1 - -To use Cloudflare D1 with superset, install the [superset-engine-d1](https://github.com/sqlalchemy-cf-d1/superset-engine-d1) library. - -``` -pip install superset-engine-d1 -``` - -The expected connection string is formatted as follows: - -``` -d1://{cloudflare_account_id}:{cloudflare_api_token}@{cloudflare_d1_database_id} -``` - -#### CockroachDB - -The recommended connector library for CockroachDB is -[sqlalchemy-cockroachdb](https://github.com/cockroachdb/sqlalchemy-cockroachdb). - -The expected connection string is formatted as follows: - -``` -cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable -``` - -#### Couchbase - -The Couchbase's Superset connection is designed to support two services: Couchbase Analytics and Couchbase Columnar. -The recommended connector library for couchbase is -[couchbase-sqlalchemy](https://github.com/couchbase/couchbase-sqlalchemy). - -``` -pip install couchbase-sqlalchemy -``` - -The expected connection string is formatted as follows: - -``` -couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false} -``` - -#### CrateDB - -The connector library for CrateDB is [sqlalchemy-cratedb]. -We recommend to add the following item to your `requirements.txt` file: - -``` -sqlalchemy-cratedb>=0.40.1,<1 -``` - -An SQLAlchemy connection string for [CrateDB Self-Managed] on localhost, -for evaluation purposes, looks like this: - -``` -crate://crate@127.0.0.1:4200 -``` - -An SQLAlchemy connection string for connecting to [CrateDB Cloud] looks like -this: - -``` -crate://:@.cratedb.net:4200/?ssl=true -``` - -Follow the steps [here](/docs/configuration/databases#installing-database-drivers) -to install the CrateDB connector package when setting up Superset locally using -Docker Compose. 
- -``` -echo "sqlalchemy-cratedb" >> ./docker/requirements-local.txt -``` - -:::resources -- [CrateDB Docs: Apache Superset Integration](https://cratedb.com/docs/guide/integrate/apache-superset/index.html) -- [Blog: Visualizing Time-Series Data with CrateDB and Superset](https://preset.io/blog/timeseries-cratedb-superset/) -::: - -[CrateDB Cloud]: https://cratedb.com/product/cloud -[CrateDB Self-Managed]: https://cratedb.com/product/self-managed -[sqlalchemy-cratedb]: https://pypi.org/project/sqlalchemy-cratedb/ - -#### Databend - -The recommended connector library for Databend is [databend-sqlalchemy](https://pypi.org/project/databend-sqlalchemy/). -Superset has been tested on `databend-sqlalchemy>=0.2.3`. - -The recommended connection string is: - -``` -databend://{username}:{password}@{host}:{port}/{database_name} -``` - -Here's a connection string example of Superset connecting to a Databend database: - -``` -databend://user:password@localhost:8000/default?secure=false -``` - -#### Databricks - -Databricks now offer a native DB API 2.0 driver, `databricks-sql-connector`, that can be used with the `sqlalchemy-databricks` dialect. You can install both with: - -```bash -pip install "apache-superset[databricks]" -``` - -To use the Hive connector you need the following information from your cluster: - -- Server hostname -- Port -- HTTP path - -These can be found under "Configuration" -> "Advanced Options" -> "JDBC/ODBC". - -You also need an access token from "Settings" -> "User Settings" -> "Access Tokens". - -Once you have all this information, add a database of type "Databricks Native Connector" and use the following SQLAlchemy URI: - -``` -databricks+connector://token:{access_token}@{server_hostname}:{port}/{database_name} -``` - -You also need to add the following configuration to "Other" -> "Engine Parameters", with your HTTP path: - -```json -{ - "connect_args": {"http_path": "sql/protocolv1/o/****"} -} -``` - -##### Older driver - -Originally Superset used `databricks-dbapi` to connect to Databricks. You might want to try it if you're having problems with the official Databricks connector: - -```bash -pip install "databricks-dbapi[sqlalchemy]" -``` - -There are two ways to connect to Databricks when using `databricks-dbapi`: using a Hive connector or an ODBC connector. Both ways work similarly, but only ODBC can be used to connect to [SQL endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html). - -#### Hive - -To connect to a Hive cluster add a database of type "Databricks Interactive Cluster" in Superset, and use the following SQLAlchemy URI: - -``` -databricks+pyhive://token:{access_token}@{server_hostname}:{port}/{database_name} -``` - -You also need to add the following configuration to "Other" -> "Engine Parameters", with your HTTP path: - -```json -{"connect_args": {"http_path": "sql/protocolv1/o/****"}} -``` - -#### ODBC - -For ODBC you first need to install the [ODBC drivers for your platform](https://databricks.com/spark/odbc-drivers-download). 
- -For a regular connection use this as the SQLAlchemy URI after selecting either "Databricks Interactive Cluster" or "Databricks SQL Endpoint" for the database, depending on your use case: - -``` -databricks+pyodbc://token:{access_token}@{server_hostname}:{port}/{database_name} -``` - -And for the connection arguments: - -```json -{"connect_args": {"http_path": "sql/protocolv1/o/****", "driver_path": "/path/to/odbc/driver"}} -``` - -The driver path should be: - -- `/Library/simba/spark/lib/libsparkodbc_sbu.dylib` (Mac OS) -- `/opt/simba/spark/lib/64/libsparkodbc_sb64.so` (Linux) - -For a connection to a SQL endpoint you need to use the HTTP path from the endpoint: - -```json -{"connect_args": {"http_path": "/sql/1.0/endpoints/****", "driver_path": "/path/to/odbc/driver"}} -``` - -#### Denodo - -The recommended connector library for Denodo is -[denodo-sqlalchemy](https://pypi.org/project/denodo-sqlalchemy/). - -The expected connection string is formatted as follows (default port is 9996): - -``` -denodo://{username}:{password}@{hostname}:{port}/{database} -``` - -#### Dremio - -The recommended connector library for Dremio is -[sqlalchemy_dremio](https://pypi.org/project/sqlalchemy-dremio/). - -The expected connection string for ODBC (Default port is 31010) is formatted as follows: - -``` -dremio+pyodbc://{username}:{password}@{host}:{port}/{database_name}/dremio?SSL=1 -``` - -The expected connection string for Arrow Flight (Dremio 4.9.1+. Default port is 32010) is formatted as follows: - -``` -dremio+flight://{username}:{password}@{host}:{port}/dremio -``` - -:::resources -- [Dremio Docs: Superset Integration](https://docs.dremio.com/current/client-applications/superset/) -- [Blog: Connecting Dremio to Apache Superset](https://www.dremio.com/tutorials/dremio-apache-superset/) -::: - -#### Apache Drill - -##### SQLAlchemy - -The recommended way to connect to Apache Drill is through SQLAlchemy. You can use the -[sqlalchemy-drill](https://github.com/JohnOmernik/sqlalchemy-drill) package. - -Once that is done, you can connect to Drill in two ways, either via the REST interface or by JDBC. -If you are connecting via JDBC, you must have the Drill JDBC Driver installed. - -The basic connection string for Drill looks like this: - -``` -drill+sadrill://:@:/?use_ssl=True -``` - -To connect to Drill running on a local machine running in embedded mode you can use the following -connection string: - -``` -drill+sadrill://localhost:8047/dfs?use_ssl=False -``` - -##### JDBC - -Connecting to Drill through JDBC is more complicated and we recommend following -[this tutorial](https://drill.apache.org/docs/using-the-jdbc-driver/). - -The connection string looks like: - -``` -drill+jdbc://:@: -``` - -##### ODBC - -We recommend reading the -[Apache Drill documentation](https://drill.apache.org/docs/installing-the-driver-on-linux/) and read -the [GitHub README](https://github.com/JohnOmernik/sqlalchemy-drill#usage-with-odbc) to learn how to -work with Drill through ODBC. - -:::resources -- [Tutorial: Query MongoDB in Superset with Apache Drill](https://medium.com/@thoren.lederer/query-data-from-mongodb-in-apache-superset-with-the-help-of-apache-drill-full-tutorial-b34c33eac6c1) -::: - -import useBaseUrl from "@docusaurus/useBaseUrl"; - -#### Apache Druid - -A native connector to Druid ships with Superset (behind the `DRUID_IS_ACTIVE` flag) but this is -slowly getting deprecated in favor of the SQLAlchemy / DBAPI connector made available in the -[pydruid library](https://pythonhosted.org/pydruid/). 
- -The connection string looks like: - -``` -druid://:@:/druid/v2/sql -``` - -Here's a breakdown of the key components of this connection string: - -- `User`: username portion of the credentials needed to connect to your database -- `Password`: password portion of the credentials needed to connect to your database -- `Host`: IP address (or URL) of the host machine that's running your database -- `Port`: specific port that's exposed on your host machine where your database is running - -##### Customizing Druid Connection - -When adding a connection to Druid, you can customize the connection a few different ways in the -**Add Database** form. - -**Custom Certificate** - -You can add certificates in the **Root Certificate** field when configuring the new database -connection to Druid: - -{" "} - -When using a custom certificate, pydruid will automatically use https scheme. - -**Disable SSL Verification** - -To disable SSL verification, add the following to the **Extras** field: - -``` -engine_params: -{"connect_args": - {"scheme": "https", "ssl_verify_cert": false}} -``` - -##### Aggregations - -Common aggregations or Druid metrics can be defined and used in Superset. The first and simpler use -case is to use the checkbox matrix exposed in your datasource’s edit view (**Sources -> Druid -Datasources -> [your datasource] -> Edit -> [tab] List Druid Column**). - -Clicking the GroupBy and Filterable checkboxes will make the column appear in the related dropdowns -while in the Explore view. Checking Count Distinct, Min, Max or Sum will result in creating new -metrics that will appear in the **List Druid Metric** tab upon saving the datasource. - -By editing these metrics, you’ll notice that their JSON element corresponds to Druid aggregation -definition. You can create your own aggregations manually from the **List Druid Metric** tab -following Druid documentation. - -##### Post-Aggregations - -Druid supports post aggregation and this works in Superset. All you have to do is create a metric, -much like you would create an aggregation manually, but specify `postagg` as a `Metric Type`. You -then have to provide a valid json post-aggregation definition (as specified in the Druid docs) in -the JSON field. - -:::resources -- [Blog: Real-Time Business Insights with Apache Druid and Superset](https://www.deep.bi/blog/real-time-business-insights-with-apache-druid-and-apache-superset) -::: - -#### Elasticsearch - -The recommended connector library for Elasticsearch is -[elasticsearch-dbapi](https://github.com/preset-io/elasticsearch-dbapi). - -The connection string for Elasticsearch looks like this: - -``` -elasticsearch+http://{user}:{password}@{host}:9200/ -``` - -**Using HTTPS** - -``` -elasticsearch+https://{user}:{password}@{host}:9200/ -``` - -Elasticsearch as a default limit of 10000 rows, so you can increase this limit on your cluster or -set Superset’s row limit on config - -``` -ROW_LIMIT = 10000 -``` - -You can query multiple indices on SQL Lab for example - -``` -SELECT timestamp, agent FROM "logstash" -``` - -But, to use visualizations for multiple indices you need to create an alias index on your cluster - -``` -POST /_aliases -{ - "actions" : [ - { "add" : { "index" : "logstash-**", "alias" : "logstash_all" } } - ] -} -``` - -Then register your table with the alias name logstash_all - -**Time zone** - -By default, Superset uses UTC time zone for elasticsearch query. 
If you need to specify a time zone, -please edit your Database and enter the settings of your specified time zone in the Other > ENGINE PARAMETERS: - -```json -{ - "connect_args": { - "time_zone": "Asia/Shanghai" - } -} -``` - -Another issue to note about the time zone problem is that before elasticsearch7.8, if you want to convert a string into a `DATETIME` object, -you need to use the `CAST` function,but this function does not support our `time_zone` setting. So it is recommended to upgrade to the version after elasticsearch7.8. -After elasticsearch7.8, you can use the `DATETIME_PARSE` function to solve this problem. -The DATETIME_PARSE function is to support our `time_zone` setting, and here you need to fill in your elasticsearch version number in the Other > VERSION setting. -the superset will use the `DATETIME_PARSE` function for conversion. - -**Disable SSL Verification** - -To disable SSL verification, add the following to the **SQLALCHEMY URI** field: - -``` -elasticsearch+https://{user}:{password}@{host}:9200/?verify_certs=False -``` - -:::resources -- [Blog: Superset Announces Elasticsearch Support](https://preset.io/blog/2019-12-16-elasticsearch-in-superset/) -::: - -#### Exasol - -The recommended connector library for Exasol is -[sqlalchemy-exasol](https://github.com/exasol/sqlalchemy-exasol). - -The connection string for Exasol looks like this: - -``` -exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC -``` - -#### Firebird - -The recommended connector library for Firebird is [sqlalchemy-firebird](https://pypi.org/project/sqlalchemy-firebird/). -Superset has been tested on `sqlalchemy-firebird>=0.7.0, <0.8`. - -The recommended connection string is: - -``` -firebird+fdb://{username}:{password}@{host}:{port}//{path_to_db_file} -``` - -Here's a connection string example of Superset connecting to a local Firebird database: - -``` -firebird+fdb://SYSDBA:masterkey@192.168.86.38:3050//Library/Frameworks/Firebird.framework/Versions/A/Resources/examples/empbuild/employee.fdb -``` - -#### Firebolt - -The recommended connector library for Firebolt is [firebolt-sqlalchemy](https://pypi.org/project/firebolt-sqlalchemy/). - -The recommended connection string is: - -``` -firebolt://{username}:{password}@{database}?account_name={name} -or -firebolt://{username}:{password}@{database}/{engine_name}?account_name={name} -``` - -It's also possible to connect using a service account: - -``` -firebolt://{client_id}:{client_secret}@{database}?account_name={name} -or -firebolt://{client_id}:{client_secret}@{database}/{engine_name}?account_name={name} -``` - -:::resources -- [Firebolt Docs: Connecting to Apache Superset](https://docs.firebolt.io/guides/integrations/connecting-to-apache-superset) -::: - -#### Google BigQuery - -The recommended connector library for BigQuery is -[sqlalchemy-bigquery](https://github.com/googleapis/python-bigquery-sqlalchemy). - -##### Install BigQuery Driver - -Follow the steps [here](/docs/configuration/databases#installing-drivers-in-docker-images) about how to -install new database drivers when setting up Superset locally via docker compose. - -```bash -echo "sqlalchemy-bigquery" >> ./docker/requirements-local.txt -``` - -##### Connecting to BigQuery - -When adding a new BigQuery connection in Superset, you'll need to add the GCP Service Account -credentials file (as a JSON). - -1. 
Create your Service Account via the Google Cloud Platform control panel, provide it access to the - appropriate BigQuery datasets, and download the JSON configuration file for the service account. -2. In Superset, you can either upload that JSON or add the JSON blob in the following format (this should be the content of your credential JSON file): - -```json -{ - "type": "service_account", - "project_id": "...", - "private_key_id": "...", - "private_key": "...", - "client_email": "...", - "client_id": "...", - "auth_uri": "...", - "token_uri": "...", - "auth_provider_x509_cert_url": "...", - "client_x509_cert_url": "..." - } -``` - -![CleanShot 2021-10-22 at 04 18 11](https://user-images.githubusercontent.com/52086618/138352958-a18ef9cb-8880-4ef1-88c1-452a9f1b8105.gif) - -3. Additionally, can connect via SQLAlchemy URI instead - - The connection string for BigQuery looks like: - - ``` - bigquery://{project_id} - ``` - - Go to the **Advanced** tab, Add a JSON blob to the **Secure Extra** field in the database configuration form with - the following format: - - ```json - { - "credentials_info": - } - ``` - - The resulting file should have this structure: - - ```json - { - "credentials_info": { - "type": "service_account", - "project_id": "...", - "private_key_id": "...", - "private_key": "...", - "client_email": "...", - "client_id": "...", - "auth_uri": "...", - "token_uri": "...", - "auth_provider_x509_cert_url": "...", - "client_x509_cert_url": "..." - } - } - ``` - -You should then be able to connect to your BigQuery datasets. - -![CleanShot 2021-10-22 at 04 47 08](https://user-images.githubusercontent.com/52086618/138354340-df57f477-d3e5-42d4-b032-d901c69d2213.gif) - -To be able to upload CSV or Excel files to BigQuery in Superset, you'll need to also add the -[pandas_gbq](https://github.com/pydata/pandas-gbq) library. - -Currently, the Google BigQuery Python SDK is not compatible with `gevent`, due to some dynamic monkeypatching on python core library by `gevent`. -So, when you deploy Superset with `gunicorn` server, you have to use worker type except `gevent`. - -:::resources -- [Tutorial: Build A StackOverflow Dashboard — Connecting Superset to BigQuery](https://preset.io/blog/2020-08-04-google-bigquery/) -::: - -#### Google Sheets - -Google Sheets has a very limited -[SQL API](https://developers.google.com/chart/interactive/docs/querylanguage). The recommended -connector library for Google Sheets is [shillelagh](https://github.com/betodealmeida/shillelagh). - -There are a few steps involved in connecting Superset to Google Sheets. - -:::resources -- [Tutorial: Connect Superset to Google Sheets](https://preset.io/blog/2020-06-01-connect-superset-google-sheets/) -::: - -#### Hana - -The recommended connector library is [sqlalchemy-hana](https://github.com/SAP/sqlalchemy-hana). - -The connection string is formatted as follows: - -``` -hana://{username}:{password}@{host}:{port} -``` - -#### Apache Hive - -The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Hive through SQLAlchemy. - -The expected connection string is formatted as follows: - -``` -hive://hive@{hostname}:{port}/{database} -``` - -:::resources -- [Cloudera: Connect Apache Hive to Superset](https://docs-archive.cloudera.com/HDPDocuments/HDP3/HDP-3.0.0/integrating-hive/content/hive_connect_hive_to_apache_superset.html) -::: - -#### Hologres - -Hologres is a real-time interactive analytics service developed by Alibaba Cloud. 
It is fully compatible with PostgreSQL 11 and integrates seamlessly with the big data ecosystem.

Hologres sample connection parameters:

- **User Name**: The AccessKey ID of your Alibaba Cloud account.
- **Password**: The AccessKey secret of your Alibaba Cloud account.
- **Database Host**: The public endpoint of the Hologres instance.
- **Database Name**: The name of the Hologres database.
- **Port**: The port number of the Hologres instance.

The connection string looks like:

```
postgresql+psycopg2://{username}:{password}@{host}:{port}/{database}
```

#### IBM DB2

The [IBM_DB_SA](https://github.com/ibmdb/python-ibmdbsa/tree/master/ibm_db_sa) library provides a
Python / SQLAlchemy interface to IBM Data Servers.

Here's the recommended connection string:

```
db2+ibm_db://{username}:{password}@{hostname}:{port}/{database}
```

There are two DB2 dialect versions implemented in SQLAlchemy. If you are connecting to a DB2 version without
`LIMIT [n]` syntax, the recommended connection string to be able to use SQL Lab is:

```
ibm_db_sa://{username}:{password}@{hostname}:{port}/{database}
```

#### Apache Impala

The recommended connector library for Apache Impala is [impyla](https://github.com/cloudera/impyla).

The expected connection string is formatted as follows:

```
impala://{hostname}:{port}/{database}
```

#### Kusto

The recommended connector library for Kusto is
[sqlalchemy-kusto](https://pypi.org/project/sqlalchemy-kusto/2.0.0/) >= 2.0.0.

The connection string for Kusto (sql dialect) looks like this:

```
kustosql+https://{cluster_url}/{database}?azure_ad_client_id={azure_ad_client_id}&azure_ad_client_secret={azure_ad_client_secret}&azure_ad_tenant_id={azure_ad_tenant_id}&msi=False
```

The connection string for Kusto (kql dialect) looks like this:

```
kustokql+https://{cluster_url}/{database}?azure_ad_client_id={azure_ad_client_id}&azure_ad_client_secret={azure_ad_client_secret}&azure_ad_tenant_id={azure_ad_tenant_id}&msi=False
```

Make sure the user has privileges to access and use all required
databases/tables/views.

#### Apache Kylin

The recommended connector library for Apache Kylin is
[kylinpy](https://github.com/Kyligence/kylinpy).

The expected connection string is formatted as follows:

```
kylin://<username>:<password>@<hostname>:<port>/<project>?<param1>=<value1>&<param2>=<value2>
```

#### MySQL

The recommended connector library for MySQL is [mysqlclient](https://pypi.org/project/mysqlclient/).

Here's the connection string:

```
mysql://{username}:{password}@{host}/{database}
```

Host:

- For Localhost: `localhost` or `127.0.0.1`
- For Docker running on Linux: `172.18.0.1`
- For On Prem: IP address or Host name
- For Docker running on macOS: `docker.for.mac.host.internal`

Port: `3306` by default

One problem with `mysqlclient` is that it will fail to connect to newer MySQL databases that use
`caching_sha2_password` for authentication, since the plugin is not included in the client. In this case,
you should use [mysql-connector-python](https://pypi.org/project/mysql-connector-python/) instead:

```
mysql+mysqlconnector://{username}:{password}@{host}/{database}
```

#### IBM Netezza Performance Server

The [nzalchemy](https://pypi.org/project/nzalchemy/) library provides a
Python / SQLAlchemy interface to IBM Netezza Performance Server (aka Netezza).
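
As with the other drivers on this page, `nzalchemy` is not bundled with Superset. A minimal install
sketch, assuming the docker compose setup with a `./docker/requirements-local.txt` file described in the
BigQuery section above:

```bash
echo "nzalchemy" >> ./docker/requirements-local.txt
```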
- -Here's the recommended connection string: - -``` -netezza+nzpy://{username}:{password}@{hostname}:{port}/{database} -``` - -#### OceanBase - -The [sqlalchemy-oceanbase](https://pypi.org/project/oceanbase_py/) library is the recommended -way to connect to OceanBase through SQLAlchemy. - -The connection string for OceanBase looks like this: - -``` -oceanbase://:@:/ -``` - -#### Ocient DB - -The recommended connector library for Ocient is [sqlalchemy-ocient](https://pypi.org/project/sqlalchemy-ocient). - -##### Install the Ocient Driver - -```bash -pip install sqlalchemy-ocient -``` - -##### Connecting to Ocient - -The format of the Ocient DSN is: - -```shell -ocient://user:password@[host][:port][/database][?param1=value1&...] -``` - -#### Oracle - -The recommended connector library is -[cx_Oracle](https://cx-oracle.readthedocs.io/en/latest/user_guide/installation.html). - -The connection string is formatted as follows: - -``` -oracle://:@: -``` - -:::resources -- [Oracle Developers: Steps to use Apache Superset and Oracle Database](https://medium.com/oracledevs/steps-to-use-apache-superset-and-oracle-database-ae0858b4f134) -::: - -#### Parseable - -[Parseable](https://www.parseable.io) is a distributed log analytics database that provides SQL-like query interface for log data. The recommended connector library is [sqlalchemy-parseable](https://github.com/parseablehq/sqlalchemy-parseable). - -The connection string is formatted as follows: - -``` -parseable://:@:/ -``` - -For example: - -``` -parseable://admin:admin@demo.parseable.com:443/ingress-nginx -``` - -Note: The stream_name in the URI represents the Parseable logstream you want to query. You can use both HTTP (port 80) and HTTPS (port 443) connections. - -:::resources -- [Blog: Parseable with Apache Superset](https://www.parseable.com/blog/parseable-with-apache-superset) -::: - -#### Apache Pinot - -The recommended connector library for Apache Pinot is [pinotdb](https://pypi.org/project/pinotdb/). - -The expected connection string is formatted as follows: - -``` -pinot+http://:/query?controller=http://:/`` -``` - -The expected connection string using username and password is formatted as follows: - -``` -pinot://:@:/query/sql?controller=http://:/verify_ssl=true`` -``` - -If you want to use explore view or joins, window functions, etc. then enable [multi-stage query engine](https://docs.pinot.apache.org/reference/multi-stage-engine). -Add below argument while creating database connection in Advanced -> Other -> ENGINE PARAMETERS - -```json -{"connect_args":{"use_multistage_engine":"true"}} -``` - -:::resources -- [Apache Pinot Docs: Superset Integration](https://docs.pinot.apache.org/integrations/superset) -- [StarTree: Data Visualization with Apache Superset and Pinot](https://startree.ai/resources/data-visualization-with-apache-superset-and-pinot) -::: - -#### Postgres - -Note that, if you're using docker compose, the Postgres connector library [psycopg2](https://www.psycopg.org/docs/) -comes out of the box with Superset. 
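
If you're not running via docker compose, the driver has to be installed in the environment where Superset
runs. A minimal sketch, assuming pip and the prebuilt wheel:

```bash
pip install psycopg2-binary
```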
- -Postgres sample connection parameters: - -- **User Name**: UserName -- **Password**: DBPassword -- **Database Host**: - - For Localhost: localhost or 127.0.0.1 - - For On Prem: IP address or Host name - - For AWS Endpoint -- **Database Name**: Database Name -- **Port**: default 5432 - -The connection string looks like: - -``` -postgresql://{username}:{password}@{host}:{port}/{database} -``` - -You can require SSL by adding `?sslmode=require` at the end: - -``` -postgresql://{username}:{password}@{host}:{port}/{database}?sslmode=require -``` - -You can read about the other SSL modes that Postgres supports in -[Table 31-1 from this documentation](https://www.postgresql.org/docs/9.1/libpq-ssl.html). - -More information about PostgreSQL connection options can be found in the -[SQLAlchemy docs](https://docs.sqlalchemy.org/en/13/dialects/postgresql.html#module-sqlalchemy.dialects.postgresql.psycopg2) -and the -[PostgreSQL docs](https://www.postgresql.org/docs/9.1/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS). - -:::resources -- [Blog: Data Visualization in PostgreSQL With Apache Superset](https://www.tigerdata.com/blog/data-visualization-in-postgresql-with-apache-superset) -::: - -#### QuestDB - -[QuestDB](https://questdb.io/) is a high-performance, open-source time-series database with SQL support. -The recommended connector library is the PostgreSQL driver [psycopg2](https://www.psycopg.org/docs/), -as QuestDB supports the PostgreSQL wire protocol. - -The connection string is formatted as follows: - -``` -postgresql+psycopg2://{username}:{password}@{hostname}:{port}/{database} -``` - -The default port for QuestDB's PostgreSQL interface is `8812`. - -:::resources -- [QuestDB Docs: Apache Superset Integration](https://questdb.com/docs/third-party-tools/superset/) -::: - -#### Presto - -The [pyhive](https://pypi.org/project/PyHive/) library is the recommended way to connect to Presto through SQLAlchemy. - -The expected connection string is formatted as follows: - -``` -presto://{hostname}:{port}/{database} -``` - -You can pass in a username and password as well: - -``` -presto://{username}:{password}@{hostname}:{port}/{database} -``` - -Here is an example connection string with values: - -``` -presto://datascientist:securepassword@presto.example.com:8080/hive -``` - -By default Superset assumes the most recent version of Presto is being used when querying the -datasource. If you’re using an older version of Presto, you can configure it in the extra parameter: - -```json -{ - "version": "0.123" -} -``` - -SSL Secure extra add json config to extra connection information. - -```json - { - "connect_args": - {"protocol": "https", - "requests_kwargs":{"verify":false} - } -} -``` - -:::resources -- [Tutorial: Presto SQL + S3 Data + Superset Data Lake](https://hackernoon.com/presto-sql-s3-data-superset-data-lake) -::: - -#### RisingWave - -The recommended connector library for RisingWave is -[sqlalchemy-risingwave](https://github.com/risingwavelabs/sqlalchemy-risingwave). - -The expected connection string is formatted as follows: - -``` -risingwave://root@{hostname}:{port}/{database}?sslmode=disable -``` - -#### Snowflake - -##### Install Snowflake Driver - -Follow the steps [here](/docs/configuration/databases#installing-database-drivers) about how to -install new database drivers when setting up Superset locally via docker compose. 
- -```bash -echo "snowflake-sqlalchemy" >> ./docker/requirements-local.txt -``` - -The recommended connector library for Snowflake is -[snowflake-sqlalchemy](https://pypi.org/project/snowflake-sqlalchemy/). - -The connection string for Snowflake looks like this: - -``` -snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse} -``` - -The schema is not necessary in the connection string, as it is defined per table/query. The role and -warehouse can be omitted if defaults are defined for the user, i.e. - -``` -snowflake://{user}:{password}@{account}.{region}/{database} -``` - -Make sure the user has privileges to access and use all required -databases/schemas/tables/views/warehouses, as the Snowflake SQLAlchemy engine does not test for -user/role rights during engine creation by default. However, when pressing the “Test Connection” -button in the Create or Edit Database dialog, user/role credentials are validated by passing -“validate_default_parameters”: True to the connect() method during engine creation. If the user/role -is not authorized to access the database, an error is recorded in the Superset logs. - -And if you want connect Snowflake with [Key Pair Authentication](https://docs.snowflake.com/en/user-guide/key-pair-auth.html#step-6-configure-the-snowflake-client-to-use-key-pair-authentication). -Please make sure you have the key pair and the public key is registered in Snowflake. -To connect Snowflake with Key Pair Authentication, you need to add the following parameters to "SECURE EXTRA" field. - -***Please note that you need to merge multi-line private key content to one line and insert `\n` between each line*** - -```json -{ - "auth_method": "keypair", - "auth_params": { - "privatekey_body": "-----BEGIN ENCRYPTED PRIVATE KEY-----\n...\n...\n-----END ENCRYPTED PRIVATE KEY-----", - "privatekey_pass":"Your Private Key Password" - } - } -``` - -If your private key is stored on server, you can replace "privatekey_body" with “privatekey_path” in parameter. - -```json -{ - "auth_method": "keypair", - "auth_params": { - "privatekey_path":"Your Private Key Path", - "privatekey_pass":"Your Private Key Password" - } -} -``` - -:::resources -- [Snowflake Builders Blog: Building Real-Time Operational Dashboards with Apache Superset and Snowflake](https://medium.com/snowflake/building-real-time-operational-dashboards-with-apache-superset-and-snowflake-23f625e07d7c) -::: - -#### Apache Solr - -The [sqlalchemy-solr](https://pypi.org/project/sqlalchemy-solr/) library provides a -Python / SQLAlchemy interface to Apache Solr. - -The connection string for Solr looks like this: - -``` -solr://{username}:{password}@{host}:{port}/{server_path}/{collection}[/?use_ssl=true|false] -``` - -#### Apache Spark SQL - -The recommended connector library for Apache Spark SQL [pyhive](https://pypi.org/project/PyHive/). - -The expected connection string is formatted as follows: - -``` -hive://hive@{hostname}:{port}/{database} -``` - -:::resources -- [Tutorial: How to Connect Apache Superset with Apache SparkSQL](https://medium.com/free-or-open-source-software/how-to-connect-apache-superset-with-apache-sparksql-50efe48ac0e4) -::: - -#### Arc - -There are two ways to connect Superset to Arc: - -**1. Arc with Apache Arrow (Recommended)** - -The recommended connector library for Arc with Apache Arrow support is [arc-superset-arrow](https://pypi.org/project/arc-superset-arrow/). 

The connection string looks like:

```
arc+arrow://{api_key}@{hostname}:{port}/{database}
```

**2. Arc with JSON**

Alternatively, you can use the JSON connector [arc-superset-dialect](https://pypi.org/project/arc-superset-dialect/).

The connection string looks like:

```
arc+json://{api_key}@{hostname}:{port}/{database}
```

Arc supports multiple databases (schemas) within a single instance. In Superset, each Arc database appears as a schema in SQL Lab.

**Note:** The Arrow dialect (`arc-superset-arrow`) is recommended for production use, as it provides 3-5x better performance using the Apache Arrow IPC binary format.

#### SQL Server

The recommended connector library for SQL Server is [pymssql](https://github.com/pymssql/pymssql).

The connection string for SQL Server looks like this:

```
mssql+pymssql://<Username>:<Password>@<Host>:<Port-default:1433>/<Database Name>
```

It is also possible to connect using [pyodbc](https://pypi.org/project/pyodbc) with the parameter
[odbc_connect](https://docs.sqlalchemy.org/en/14/dialects/mssql.html#pass-through-exact-pyodbc-string).

In that case, the connection string looks like this:

```
mssql+pyodbc:///?odbc_connect=Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3BServer%3Dtcp%3A%3Cmy_server%3E%2C1433%3BDatabase%3Dmy_database%3BUid%3Dmy_user_name%3BPwd%3Dmy_password%3BEncrypt%3Dyes%3BConnection+Timeout%3D30
```

:::note
You might have noticed that some special characters are used in the above connection string. For example,
the value of the `odbc_connect` parameter, `Driver%3D%7BODBC+Driver+17+for+SQL+Server%7D%3B`, is the
URL-encoded form of `Driver={ODBC+Driver+17+for+SQL+Server};`. It's important that the connection string
is URL encoded.

For more information, check the [sqlalchemy documentation](https://docs.sqlalchemy.org/en/20/core/engines.html#escaping-special-characters-such-as-signs-in-passwords), which says: `When constructing a fully formed URL string to pass to create_engine(), special characters such as those that may be used in the user and password need to be URL encoded to be parsed correctly. This includes the @ sign.`
:::

#### SingleStore

The recommended connector library for SingleStore is
[sqlalchemy-singlestoredb](https://github.com/singlestore-labs/sqlalchemy-singlestoredb).

The expected connection string is formatted as follows:

```
singlestoredb://{username}:{password}@{host}:{port}/{database}
```

#### StarRocks

The [sqlalchemy-starrocks](https://pypi.org/project/starrocks/) library is the recommended
way to connect to StarRocks through SQLAlchemy.

You'll need the following setting values to form the connection string:

- **User**: User Name
- **Password**: DBPassword
- **Host**: StarRocks FE Host
- **Catalog**: Catalog Name
- **Database**: Database Name
- **Port**: StarRocks FE port

Here's what the connection string looks like:

```
starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>
```

:::resources
- [StarRocks Docs: Superset Integration](https://docs.starrocks.io/docs/integrations/BI_integrations/Superset/)
:::

#### TDengine

[TDengine](https://www.tdengine.com) is a high-performance, scalable time-series database for Industrial IoT
that provides a SQL-like query interface.
- -The recommended connector library for TDengine is [taospy](https://pypi.org/project/taospy/) and [taos-ws-py](https://pypi.org/project/taos-ws-py/) - -The expected connection string is formatted as follows: - -``` -taosws://:@: -``` - -For example: - -``` -taosws://root:taosdata@127.0.0.1:6041 -``` - -#### Teradata - -The recommended connector library is -[teradatasqlalchemy](https://pypi.org/project/teradatasqlalchemy/). - -The connection string for Teradata looks like this: - -``` -teradatasql://{user}:{password}@{host} -``` - -#### ODBC Driver - -There's also an older connector named - [sqlalchemy-teradata](https://github.com/Teradata/sqlalchemy-teradata) that - requires the installation of ODBC drivers. The Teradata ODBC Drivers - are available -here: https://downloads.teradata.com/download/connectivity/odbc-driver/linux - -Here are the required environment variables: - -```bash -export ODBCINI=/.../teradata/client/ODBC_64/odbc.ini -export ODBCINST=/.../teradata/client/ODBC_64/odbcinst.ini -``` - -We recommend using the first library because of the - lack of requirement around ODBC drivers and - because it's more regularly updated. - -#### TimescaleDB - -[TimescaleDB](https://www.timescale.com) is the open-source relational database for time-series and analytics to build powerful data-intensive applications. -TimescaleDB is a PostgreSQL extension, and you can use the standard PostgreSQL connector library, [psycopg2](https://www.psycopg.org/docs/), to connect to the database. - -If you're using docker compose, psycopg2 comes out of the box with Superset. - -TimescaleDB sample connection parameters: - -- **User Name**: User -- **Password**: Password -- **Database Host**: - - For Localhost: localhost or 127.0.0.1 - - For On Prem: IP address or Host name - - For [Timescale Cloud](https://console.cloud.timescale.com) service: Host name - - For [Managed Service for TimescaleDB](https://portal.managed.timescale.com) service: Host name -- **Database Name**: Database Name -- **Port**: default 5432 or Port number of the service - -The connection string looks like: - -``` -postgresql://{username}:{password}@{host}:{port}/{database name} -``` - -You can require SSL by adding `?sslmode=require` at the end (e.g. in case you use [Timescale Cloud](https://www.timescale.com/cloud)): - -``` -postgresql://{username}:{password}@{host}:{port}/{database name}?sslmode=require -``` - -[Learn more about TimescaleDB!](https://docs.timescale.com/) - -:::resources -- [Timescale DevRel: Visualize time series data with TimescaleDB and Apache Superset](https://attilatoth.dev/speaking/timescaledb-superset/) -- [Tutorial: PostgreSQL with TimescaleDB — Visualizing Real-Time Data with Superset](https://www.slingacademy.com/article/postgresql-with-timescaledb-visualizing-real-time-data-with-superset/) -::: - -#### Trino - -Supported trino version 352 and higher - -##### Connection String - -The connection string format is as follows: - -``` -trino://{username}:{password}@{hostname}:{port}/{catalog} -``` - -If you are running Trino with docker on local machine, please use the following connection URL - -``` -trino://trino@host.docker.internal:8080 -``` - -##### Authentications - -###### 1. 
Basic Authentication - -You can provide `username`/`password` in the connection string or in the `Secure Extra` field at `Advanced / Security` - -- In Connection String - - ``` - trino://{username}:{password}@{hostname}:{port}/{catalog} - ``` - -- In `Secure Extra` field - - ```json - { - "auth_method": "basic", - "auth_params": { - "username": "", - "password": "" - } - } - ``` - -NOTE: if both are provided, `Secure Extra` always takes higher priority. - -###### 2. Kerberos Authentication - -In `Secure Extra` field, config as following example: - -```json -{ - "auth_method": "kerberos", - "auth_params": { - "service_name": "superset", - "config": "/path/to/krb5.config", - ... - } -} -``` - -All fields in `auth_params` are passed directly to the [`KerberosAuthentication`](https://github.com/trinodb/trino-python-client/blob/0.306.0/trino/auth.py#L40) class. - -NOTE: Kerberos authentication requires installing the [`trino-python-client`](https://github.com/trinodb/trino-python-client) locally with either the `all` or `kerberos` optional features, i.e., installing `trino[all]` or `trino[kerberos]` respectively. - -###### 3. Certificate Authentication - -In `Secure Extra` field, config as following example: - -```json -{ - "auth_method": "certificate", - "auth_params": { - "cert": "/path/to/cert.pem", - "key": "/path/to/key.pem" - } -} -``` - -All fields in `auth_params` are passed directly to the [`CertificateAuthentication`](https://github.com/trinodb/trino-python-client/blob/0.315.0/trino/auth.py#L416) class. - -###### 4. JWT Authentication - -Config `auth_method` and provide token in `Secure Extra` field - -```json -{ - "auth_method": "jwt", - "auth_params": { - "token": "" - } -} -``` - -###### 5. Custom Authentication - -To use custom authentication, first you need to add it into -`ALLOWED_EXTRA_AUTHENTICATIONS` allow list in Superset config file: - -```python -from your.module import AuthClass -from another.extra import auth_method - -ALLOWED_EXTRA_AUTHENTICATIONS: Dict[str, Dict[str, Callable[..., Any]]] = { - "trino": { - "custom_auth": AuthClass, - "another_auth_method": auth_method, - }, -} -``` - -Then in `Secure Extra` field: - -```json -{ - "auth_method": "custom_auth", - "auth_params": { - ... - } -} -``` - -You can also use custom authentication by providing reference to your `trino.auth.Authentication` class -or factory function (which returns an `Authentication` instance) to `auth_method`. - -All fields in `auth_params` are passed directly to your class/function. - -:::resources -- [Starburst Docs: Superset Integration](https://docs.starburst.io/clients/superset.html) -- [Podcast: Trino and Superset](https://trino.io/episodes/12.html) -- [Blog: Trino and Apache Superset](https://preset.io/blog/2021-6-22-trino-superset/) -::: - -#### Vertica - -The recommended connector library is -[sqlalchemy-vertica-python](https://pypi.org/project/sqlalchemy-vertica-python/). 
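
A minimal install sketch, assuming pip:

```bash
pip install sqlalchemy-vertica-python
```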
The -[Vertica](http://www.vertica.com/) connection parameters are: - -- **User Name:** UserName -- **Password:** DBPassword -- **Database Host:** - - For Localhost : localhost or 127.0.0.1 - - For On Prem : IP address or Host name - - For Cloud: IP Address or Host Name -- **Database Name:** Database Name -- **Port:** default 5433 - -The connection string is formatted as follows: - -``` -vertica+vertica_python://{username}:{password}@{host}/{database} -``` - -Other parameters: - -- Load Balancer - Backup Host - -#### YDB - -The recommended connector library for [YDB](https://ydb.tech/) is -[ydb-sqlalchemy](https://pypi.org/project/ydb-sqlalchemy/). - -##### Connection String - -The connection string for YDB looks like this: - -``` -ydb://{host}:{port}/{database_name} -``` - -##### Protocol - -You can specify `protocol` in the `Secure Extra` field at `Advanced / Security`: - -``` -{ - "protocol": "grpcs" -} -``` - -Default is `grpc`. - -##### Authentication Methods - -###### Static Credentials - -To use `Static Credentials` you should provide `username`/`password` in the `Secure Extra` field at `Advanced / Security`: - -``` -{ - "credentials": { - "username": "...", - "password": "..." - } -} -``` - -###### Access Token Credentials - -To use `Access Token Credentials` you should provide `token` in the `Secure Extra` field at `Advanced / Security`: - -``` -{ - "credentials": { - "token": "...", - } -} -``` - -##### Service Account Credentials - -To use Service Account Credentials, you should provide `service_account_json` in the `Secure Extra` field at `Advanced / Security`: - -``` -{ - "credentials": { - "service_account_json": { - "id": "...", - "service_account_id": "...", - "created_at": "...", - "key_algorithm": "...", - "public_key": "...", - "private_key": "..." - } - } -} -``` - -#### YugabyteDB - -[YugabyteDB](https://www.yugabyte.com/) is a distributed SQL database built on top of PostgreSQL. - -Note that, if you're using docker compose, the -Postgres connector library [psycopg2](https://www.psycopg.org/docs/) -comes out of the box with Superset. - -The connection string looks like: - -``` -postgresql://{username}:{password}@{host}:{port}/{database} -``` - -:::resources -- [Blog: Introduction to YugabyteDB and Apache Superset](https://preset.io/blog/introduction-yugabytedb-apache-superset/) -::: - -## Connecting through the UI - -Here is the documentation on how to leverage the new DB Connection UI. This will provide admins the ability to enhance the UX for users who want to connect to new databases. - -![db-conn-docs](https://user-images.githubusercontent.com/27827808/125499607-94e300aa-1c0f-4c60-b199-3f9de41060a3.gif) - -There are now 3 steps when connecting to a database in the new UI: - -Step 1: First the admin must inform superset what engine they want to connect to. This page is powered by the `/available` endpoint which pulls on the engines currently installed in your environment, so that only supported databases are shown. - -Step 2: Next, the admin is prompted to enter database specific parameters. Depending on whether there is a dynamic form available for that specific engine, the admin will either see the new custom form or the legacy SQLAlchemy form. We currently have built dynamic forms for (Redshift, MySQL, Postgres, and BigQuery). The new form prompts the user for the parameters needed to connect (for example, username, password, host, port, etc.) and provides immediate feedback on errors. 
- -Step 3: Finally, once the admin has connected to their DB using the dynamic form they have the opportunity to update any optional advanced settings. - -We hope this feature will help eliminate a huge bottleneck for users to get into the application and start crafting datasets. - -##### How to setup up preferred database options and images - -We added a new configuration option where the admin can define their preferred databases, in order: - -```python -# A list of preferred databases, in order. These databases will be -# displayed prominently in the "Add Database" dialog. You should -# use the "engine_name" attribute of the corresponding DB engine spec -# in `superset/db_engine_specs/`. -PREFERRED_DATABASES: list[str] = [ - "PostgreSQL", - "Presto", - "MySQL", - "SQLite", -] -``` - -For copyright reasons the logos for each database are not distributed with Superset. - -##### Setting images - -- To set the images of your preferred database, admins must create a mapping in the `superset_text.yml` file with engine and location of the image. The image can be host locally inside your static/file directory or online (e.g. S3) - -```python -DB_IMAGES: - postgresql: "path/to/image/postgres.jpg" - bigquery: "path/to/s3bucket/bigquery.jpg" - snowflake: "path/to/image/snowflake.jpg" -``` - -##### How to add new database engines to available endpoint - -Currently the new modal supports the following databases: - -- Postgres -- Redshift -- MySQL -- BigQuery - -When the user selects a database not in this list they will see the old dialog asking for the SQLAlchemy URI. New databases can be added gradually to the new flow. In order to support the rich configuration a DB engine spec needs to have the following attributes: - -1. `parameters_schema`: a Marshmallow schema defining the parameters needed to configure the database. For Postgres this includes username, password, host, port, etc. ([see](https://github.com/apache/superset/blob/accee507c0819cd0d7bcfb5a3e1199bc81eeebf2/superset/db_engine_specs/base.py#L1309-L1320)). -2. `default_driver`: the name of the recommended driver for the DB engine spec. Many SQLAlchemy dialects support multiple drivers, but usually one is the official recommendation. For Postgres we use "psycopg2". -3. `sqlalchemy_uri_placeholder`: a string that helps the user in case they want to type the URI directly. -4. `encryption_parameters`: parameters used to build the URI when the user opts for an encrypted connection. For Postgres this is `{"sslmode": "require"}`. - -In addition, the DB engine spec must implement these class methods: - -- `build_sqlalchemy_uri(cls, parameters, encrypted_extra)`: this method receives the distinct parameters and builds the URI from them. -- `get_parameters_from_uri(cls, uri, encrypted_extra)`: this method does the opposite, extracting the parameters from a given URI. -- `validate_parameters(cls, parameters)`: this method is used for `onBlur` validation of the form. It should return a list of `SupersetError` indicating which parameters are missing, and which parameters are definitely incorrect ([example](https://github.com/apache/superset/blob/accee507c0819cd0d7bcfb5a3e1199bc81eeebf2/superset/db_engine_specs/base.py#L1404)). - -For databases like MySQL and Postgres that use the standard format of `engine+driver://user:password@host:port/dbname` all you need to do is add the `BasicParametersMixin` to the DB engine spec, and then define the parameters 2-4 (`parameters_schema` is already present in the mixin). 
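
As a rough illustration, a spec for a hypothetical Postgres-compatible engine using the mixin might look like
the sketch below (the class name, engine names, and URI placeholder are assumptions for illustration; check
the existing specs in `superset/db_engine_specs/` for the exact interfaces in your version):

```python
from superset.db_engine_specs.base import BaseEngineSpec, BasicParametersMixin


class MyDatabaseEngineSpec(BasicParametersMixin, BaseEngineSpec):
    """Hypothetical engine spec for a Postgres-compatible database."""

    engine = "mydb"
    engine_name = "MyDatabase"

    # 2. Recommended driver for the SQLAlchemy dialect.
    default_driver = "psycopg2"

    # 3. Shown to users who prefer typing the URI directly.
    sqlalchemy_uri_placeholder = (
        "mydb+psycopg2://user:password@host:port/dbname[?key=value&key=value...]"
    )

    # 4. Parameters used to build the URI when the user opts for an encrypted connection.
    encryption_parameters = {"sslmode": "require"}
```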
- -For other databases you need to implement these methods yourself. The BigQuery DB engine spec is a good example of how to do that. - -### Extra Database Settings - -##### Deeper SQLAlchemy Integration - -It is possible to tweak the database connection information using the parameters exposed by -SQLAlchemy. In the **Database edit** view, you can edit the **Extra** field as a JSON blob. - -This JSON string contains extra configuration elements. The `engine_params` object gets unpacked -into the `sqlalchemy.create_engine` call, while the `metadata_params` get unpacked into the -`sqlalchemy.MetaData` call. Refer to the SQLAlchemy docs for more information. - -##### Schemas - -Databases like Postgres and Redshift use the **schema** as the logical entity on top of the -**database**. For Superset to connect to a specific schema, you can set the **schema** parameter in -the **Edit Tables** form (Sources > Tables > Edit record). - -##### External Password Store for SQLAlchemy Connections - -Superset can be configured to use an external store for database passwords. This is useful if you a -running a custom secret distribution framework and do not wish to store secrets in Superset’s meta -database. - -Example: Write a function that takes a single argument of type `sqla.engine.url` and returns the -password for the given connection string. Then set `SQLALCHEMY_CUSTOM_PASSWORD_STORE` in your config -file to point to that function. - -```python -def example_lookup_password(url): - secret = <> - return 'secret' - -SQLALCHEMY_CUSTOM_PASSWORD_STORE = example_lookup_password -``` - -A common pattern is to use environment variables to make secrets available. -`SQLALCHEMY_CUSTOM_PASSWORD_STORE` can also be used for that purpose. - -```python -def example_password_as_env_var(url): - # assuming the uri looks like - # mysql://localhost?superset_user:{SUPERSET_PASSWORD} - return url.password.format(**os.environ) - -SQLALCHEMY_CUSTOM_PASSWORD_STORE = example_password_as_env_var -``` - -##### SSL Access to Databases - -You can use the `Extra` field in the **Edit Databases** form to configure SSL: - -```JSON -{ - "metadata_params": {}, - "engine_params": { - "connect_args":{ - "sslmode":"require", - "sslrootcert": "/path/to/my/pem" - } - } -} -``` -##### Custom Error Messages -You can use the `CUSTOM_DATABASE_ERRORS` in the `superset/custom_database_errors.py` file or overwrite it in your config file to configure custom error messages for database exceptions. - -This feature lets you transform raw database errors into user-friendly messages, optionally including documentation links and hiding default error codes. - -Provide an empty string as the first value to keep the original error message. This way, you can add just a link to the documentation -**Example usage:** -```Python -CUSTOM_DATABASE_ERRORS = { - "database_name": { - re.compile('permission denied for view'): ( - __( - 'Permission denied' - ), - SupersetErrorType.GENERIC_DB_ENGINE_ERROR, - { - "custom_doc_links": [ - { - "url": "https://example.com/docs/1", - "label": "Check documentation" - }, - ], - "show_issue_info": False, - } - ) - }, - "examples": { - re.compile(r'message="(?P[^"]*)"'): ( - __( - 'Unexpected error: "%(message)s"' - ), - SupersetErrorType.GENERIC_DB_ENGINE_ERROR, - {} - ) - } -} -``` - -**Options:** - -- ``custom_doc_links``: List of documentation links to display with the error. -- ``show_issue_info``: Set to ``False`` to hide default error codes. 

## Misc

### Querying across databases

Superset offers an experimental feature for querying across different databases. This is done via a special
database called "Superset meta database" that uses the "superset://" SQLAlchemy URI. When using this database,
it's possible to query any table in any of the configured databases using the following syntax:

```sql
SELECT * FROM "database name.[[catalog.].schema].table name";
```

For example:

```sql
SELECT * FROM "examples.birth_names";
```

Spaces are allowed, but periods in the names must be replaced by `%2E`. Eg:

```sql
SELECT * FROM "Superset meta database.examples%2Ebirth_names";
```

The query above returns the same rows as `SELECT * FROM "examples.birth_names"`, and also shows that the meta
database can query tables from any database, even itself!

#### Considerations

Before enabling this feature, there are a few considerations to keep in mind. First, the meta database enforces
permissions on the queried tables, so users should only have access via the meta database to tables that they
originally have access to. Nevertheless, the meta database is a new surface for potential attacks, and bugs
could allow users to see data they should not.

Second, there are performance considerations. The meta database pushes any filtering, sorting, and limiting down
to the underlying databases, but any aggregations and joins happen in memory in the process running the query.
Because of this, it's recommended to run the meta database in async mode, so queries are executed in Celery
workers instead of the web workers. Additionally, it's possible to specify a hard limit on how many rows are
returned from the underlying databases.

#### Enabling the meta database

To enable the Superset meta database, first you need to set the `ENABLE_SUPERSET_META_DB` feature flag to true.
Then, add a new database of type "Superset meta database" with the SQLAlchemy URI "superset://".

If you enable DML in the meta database, users will be able to run DML queries on underlying databases
**as long as DML is also enabled in them**. This allows users to run queries that move data across databases.

Second, you might want to change the value of `SUPERSET_META_DB_LIMIT`. The default value is 1000, and it defines
how many rows are read from each database before any aggregations and joins are executed. You can also set this
value to `None` if you only have small tables.

Additionally, you might want to restrict the databases that the meta database has access to.
This can be done in the database configuration, under "Advanced" -> "Other" -> "ENGINE PARAMETERS" and adding: - -```json -{"allowed_dbs":["Google Sheets","examples"]} -``` diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index cfb4121457c9..c9b0e8c28c55 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -222,7 +222,7 @@ const config: Config = { from: '/gallery.html', }, { - to: '/docs/configuration/databases', + to: '/docs/databases', from: '/druid.html', }, { @@ -274,7 +274,7 @@ const config: Config = { from: '/docs/contributing/contribution-page', }, { - to: '/docs/configuration/databases', + to: '/docs/databases', from: '/docs/databases/yugabyte/', }, { @@ -410,6 +410,11 @@ const config: Config = { docId: 'intro', label: 'Getting Started', }, + { + type: 'doc', + docId: 'databases/index', + label: 'Databases', + }, { type: 'doc', docId: 'faq', diff --git a/docs/package.json b/docs/package.json index bcc9cfea73b4..a034ed079cab 100644 --- a/docs/package.json +++ b/docs/package.json @@ -6,17 +6,22 @@ "scripts": { "docusaurus": "docusaurus", "_init": "cat src/intro_header.txt ../README.md > docs/intro.md", - "start": "yarn run _init && yarn run generate:extension-components && NODE_ENV=development docusaurus start", + "start": "yarn run _init && yarn run generate:extension-components && yarn run generate:database-docs && NODE_ENV=development docusaurus start", "stop": "pkill -f 'docusaurus start' || pkill -f 'docusaurus serve' || echo 'No docusaurus server running'", - "build": "yarn run _init && yarn run generate:extension-components && DEBUG=docusaurus:* docusaurus build", + "build": "yarn run _init && yarn run generate:extension-components && yarn run generate:database-docs && DEBUG=docusaurus:* docusaurus build", "swizzle": "docusaurus swizzle", "deploy": "docusaurus deploy", "clear": "docusaurus clear", "serve": "yarn run _init && docusaurus serve", "write-translations": "docusaurus write-translations", "write-heading-ids": "docusaurus write-heading-ids", - "typecheck": "yarn run generate:extension-components && tsc", + "typecheck": "yarn run generate:extension-components && yarn run generate:database-docs && tsc", "generate:extension-components": "node scripts/generate-extension-components.mjs", + "generate:database-docs": "node scripts/generate-database-docs.mjs", + "gen-db-docs": "node scripts/generate-database-docs.mjs", + "lint:db-metadata": "python3 ../superset/db_engine_specs/lint_metadata.py", + "lint:db-metadata:report": "python3 ../superset/db_engine_specs/lint_metadata.py --markdown -o ../superset/db_engine_specs/METADATA_STATUS.md", + "update:readme-db-logos": "node scripts/generate-database-docs.mjs --update-readme", "eslint": "eslint .", "version:add": "node scripts/manage-versions.mjs add", "version:remove": "node scripts/manage-versions.mjs remove", diff --git a/docs/scripts/generate-database-docs.mjs b/docs/scripts/generate-database-docs.mjs new file mode 100644 index 000000000000..cde02d127dc6 --- /dev/null +++ b/docs/scripts/generate-database-docs.mjs @@ -0,0 +1,867 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * This script generates database documentation data from engine spec metadata. + * It outputs a JSON file that can be imported by React components for rendering. + * + * Usage: node scripts/generate-database-docs.mjs + * + * The script can run in two modes: + * 1. With Flask app (full diagnostics) - requires superset to be installed + * 2. Fallback mode (documentation only) - parses engine spec `metadata` attributes via AST + */ + +import { spawnSync } from 'child_process'; +import fs from 'fs'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const ROOT_DIR = path.resolve(__dirname, '../..'); +const DOCS_DIR = path.resolve(__dirname, '..'); +const DATA_OUTPUT_DIR = path.join(DOCS_DIR, 'src/data'); +const DATA_OUTPUT_FILE = path.join(DATA_OUTPUT_DIR, 'databases.json'); +const MDX_OUTPUT_DIR = path.join(DOCS_DIR, 'docs/databases'); +const MDX_SUPPORTED_DIR = path.join(MDX_OUTPUT_DIR, 'supported'); + +/** + * Try to run the full lib.py script with Flask context + */ +function tryRunFullScript() { + try { + console.log('Attempting to run lib.py with Flask context...'); + const pythonCode = ` +import sys +import json +sys.path.insert(0, '.') +from superset.app import create_app +from superset.db_engine_specs.lib import generate_yaml_docs +app = create_app() +with app.app_context(): + docs = generate_yaml_docs() + print(json.dumps(docs, default=str)) +`; + const result = spawnSync('python', ['-c', pythonCode], { + cwd: ROOT_DIR, + encoding: 'utf-8', + timeout: 60000, + maxBuffer: 10 * 1024 * 1024, + env: { ...process.env, SUPERSET_SECRET_KEY: 'docs-build-key' }, + }); + + if (result.error) { + throw result.error; + } + if (result.status !== 0) { + throw new Error(result.stderr || 'Python script failed'); + } + return JSON.parse(result.stdout); + } catch (error) { + console.log('Full script execution failed, using fallback mode...'); + console.log(' Reason:', error.message?.split('\n')[0] || 'Unknown error'); + return null; + } +} + +/** + * Extract metadata from individual engine spec files using AST parsing + * This is the preferred approach - reads directly from spec.metadata attributes + * Supports metadata inheritance - child classes inherit and merge with parent metadata + */ +function extractEngineSpecMetadata() { + console.log('Extracting metadata from engine spec files...'); + console.log(` ROOT_DIR: ${ROOT_DIR}`); + + try { + const pythonCode = ` +import sys +import json +import ast +import os + +def eval_node(node): + """Safely evaluate an AST node as a Python literal.""" + if node is None: + return None + if isinstance(node, ast.Constant): + return node.value + elif isinstance(node, ast.List): + return [eval_node(e) for e in node.elts] + elif isinstance(node, ast.Dict): + result = {} + for k, v in zip(node.keys, node.values): + if k is not None: + key = eval_node(k) + if key is not None: + result[key] = eval_node(v) + return result + elif isinstance(node, ast.Name): + # Handle True, False, None constants + if node.id == 'True': + return 
True + elif node.id == 'False': + return False + elif node.id == 'None': + return None + return node.id + elif isinstance(node, ast.Attribute): + # Handle DatabaseCategory.SOMETHING - return just the attribute name + return node.attr + elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.Add): + left, right = eval_node(node.left), eval_node(node.right) + if isinstance(left, str) and isinstance(right, str): + return left + right + return None + elif isinstance(node, ast.Tuple): + return tuple(eval_node(e) for e in node.elts) + elif isinstance(node, ast.JoinedStr): + # f-strings - just return a placeholder + return "" + return None + +def deep_merge(base, override): + """Deep merge two dictionaries. Override values take precedence.""" + if base is None: + return override + if override is None: + return base + if not isinstance(base, dict) or not isinstance(override, dict): + return override + + # Fields that should NOT be inherited from parent classes + # - compatible_databases: Each class defines its own compatible DBs + # - categories: Each class defines its own categories (not extended from parent) + NON_INHERITABLE_FIELDS = {'compatible_databases', 'categories'} + + result = base.copy() + # Remove non-inheritable fields from base (they should only come from the class that defines them) + for field in NON_INHERITABLE_FIELDS: + result.pop(field, None) + + for key, value in override.items(): + if key in result and isinstance(result[key], dict) and isinstance(value, dict): + result[key] = deep_merge(result[key], value) + elif key in result and isinstance(result[key], list) and isinstance(value, list): + # Extend lists from parent (e.g., drivers) + result[key] = result[key] + value + else: + result[key] = value + return result + +databases = {} +specs_dir = 'superset/db_engine_specs' +errors = [] +debug_info = { + "cwd": os.getcwd(), + "specs_dir_exists": os.path.isdir(specs_dir), + "files_checked": 0, + "classes_found": 0, + "classes_with_metadata": 0, + "inherited_metadata": 0, +} + +if not os.path.isdir(specs_dir): + print(json.dumps({"error": f"Directory not found: {specs_dir}", "cwd": os.getcwd()})) + sys.exit(1) + +# First pass: collect all class info (name, bases, metadata) +class_info = {} # class_name -> {bases: [], metadata: {}, engine_name: str, filename: str} + +for filename in sorted(os.listdir(specs_dir)): + if not filename.endswith('.py') or filename in ('__init__.py', 'lib.py', 'lint_metadata.py'): + continue + + debug_info["files_checked"] += 1 + filepath = os.path.join(specs_dir, filename) + try: + with open(filepath) as f: + source = f.read() + tree = ast.parse(source) + + for node in ast.walk(tree): + if not isinstance(node, ast.ClassDef): + continue + + # Get base class names + base_names = [] + for b in node.bases: + if isinstance(b, ast.Name): + base_names.append(b.id) + elif isinstance(b, ast.Attribute): + base_names.append(b.attr) + + is_engine_spec = any('EngineSpec' in name or 'Mixin' in name for name in base_names) + if not is_engine_spec: + continue + + # Extract class attributes + engine_name = None + metadata = None + + for item in node.body: + if isinstance(item, ast.Assign): + for target in item.targets: + if isinstance(target, ast.Name): + if target.id == 'engine_name': + val = eval_node(item.value) + if isinstance(val, str): + engine_name = val + elif target.id == 'metadata': + metadata = eval_node(item.value) + + # Check for engine attribute with non-empty value to distinguish + # true base classes from product classes like OceanBaseEngineSpec + 
has_non_empty_engine = False + for item in node.body: + if isinstance(item, ast.Assign): + for target in item.targets: + if isinstance(target, ast.Name) and target.id == 'engine': + # Check if engine value is non-empty string + if isinstance(item.value, ast.Constant): + has_non_empty_engine = bool(item.value.value) + break + + # True base classes: end with BaseEngineSpec AND don't define engine + # or have empty engine (like PostgresBaseEngineSpec with engine = "") + is_true_base = ( + node.name.endswith('BaseEngineSpec') and not has_non_empty_engine + ) or 'Mixin' in node.name + + # Store class info for inheritance resolution + class_info[node.name] = { + 'bases': base_names, + 'metadata': metadata, + 'engine_name': engine_name, + 'filename': filename, + 'is_base_or_mixin': is_true_base, + } + except Exception as e: + errors.append(f"{filename}: {str(e)}") + +# Second pass: resolve inheritance and build final metadata +def get_inherited_metadata(class_name, visited=None): + """Recursively get metadata from parent classes.""" + if visited is None: + visited = set() + if class_name in visited: + return {} # Prevent circular inheritance + visited.add(class_name) + + info = class_info.get(class_name) + if not info: + return {} + + # Start with parent metadata + inherited = {} + for base_name in info['bases']: + parent_metadata = get_inherited_metadata(base_name, visited.copy()) + if parent_metadata: + inherited = deep_merge(inherited, parent_metadata) + + # Merge with own metadata (own takes precedence) + if info['metadata']: + inherited = deep_merge(inherited, info['metadata']) + + return inherited + +for class_name, info in class_info.items(): + # Skip base classes and mixins + if info['is_base_or_mixin']: + continue + + debug_info["classes_found"] += 1 + + # Get final metadata with inheritance + final_metadata = get_inherited_metadata(class_name) + + # Remove compatible_databases if not defined by this class (it's not inheritable) + own_metadata = info['metadata'] or {} + if 'compatible_databases' not in own_metadata and 'compatible_databases' in final_metadata: + del final_metadata['compatible_databases'] + + # Track if we inherited anything + if final_metadata and final_metadata != own_metadata: + debug_info["inherited_metadata"] += 1 + + # Use class name as fallback for engine_name + display_name = info['engine_name'] or class_name.replace('EngineSpec', '').replace('_', ' ') + + if final_metadata and isinstance(final_metadata, dict) and display_name: + debug_info["classes_with_metadata"] += 1 + databases[display_name] = { + 'engine': display_name.lower().replace(' ', '_'), + 'engine_name': display_name, + 'module': info['filename'][:-3], # Remove .py extension + 'documentation': final_metadata, + 'time_grains': {}, + 'score': 0, + 'max_score': 0, + 'joins': True, + 'subqueries': True, + 'supports_dynamic_schema': False, + 'supports_catalog': False, + 'supports_dynamic_catalog': False, + 'ssh_tunneling': False, + 'query_cancelation': False, + 'supports_file_upload': False, + 'user_impersonation': False, + 'query_cost_estimation': False, + 'sql_validation': False, + } + +if errors and not databases: + print(json.dumps({"error": "Parse errors", "details": errors, "debug": debug_info}), file=sys.stderr) + +# Print debug info to stderr for troubleshooting +print(json.dumps(debug_info), file=sys.stderr) + +print(json.dumps(databases, default=str)) +`; + const result = spawnSync('python3', ['-c', pythonCode], { + cwd: ROOT_DIR, + encoding: 'utf-8', + timeout: 30000, + maxBuffer: 10 * 1024 * 
1024, + }); + + if (result.error) { + throw result.error; + } + // Log debug info from stderr + if (result.stderr) { + console.log('Python debug info:', result.stderr.trim()); + } + if (result.status !== 0) { + throw new Error(result.stderr || 'Python script failed'); + } + const databases = JSON.parse(result.stdout); + if (Object.keys(databases).length === 0) { + throw new Error('No metadata found in engine specs'); + } + + console.log(`Extracted metadata from ${Object.keys(databases).length} engine specs`); + return databases; + } catch (err) { + console.log('Engine spec metadata extraction failed:', err.message); + return null; + } +} + +/** + * Build statistics from the database data + */ +function buildStatistics(databases) { + const stats = { + totalDatabases: Object.keys(databases).length, + withDocumentation: 0, + withConnectionString: 0, + withDrivers: 0, + withAuthMethods: 0, + supportsJoins: 0, + supportsSubqueries: 0, + supportsDynamicSchema: 0, + supportsCatalog: 0, + averageScore: 0, + maxScore: 0, + byCategory: {}, + }; + + let totalScore = 0; + + for (const [name, db] of Object.entries(databases)) { + const docs = db.documentation || {}; + + if (Object.keys(docs).length > 0) stats.withDocumentation++; + if (docs.connection_string || docs.drivers?.length > 0) + stats.withConnectionString++; + if (docs.drivers?.length > 0) stats.withDrivers++; + if (docs.authentication_methods?.length > 0) stats.withAuthMethods++; + if (db.joins) stats.supportsJoins++; + if (db.subqueries) stats.supportsSubqueries++; + if (db.supports_dynamic_schema) stats.supportsDynamicSchema++; + if (db.supports_catalog) stats.supportsCatalog++; + + totalScore += db.score || 0; + if (db.max_score > stats.maxScore) stats.maxScore = db.max_score; + + // Use categories from documentation metadata (computed by Python) + // Each database can belong to multiple categories + const categories = docs.categories || ['OTHER']; + for (const cat of categories) { + // Map category constant names to display names + const categoryDisplayNames = { + 'CLOUD_AWS': 'Cloud - AWS', + 'CLOUD_GCP': 'Cloud - Google', + 'CLOUD_AZURE': 'Cloud - Azure', + 'CLOUD_DATA_WAREHOUSES': 'Cloud Data Warehouses', + 'APACHE_PROJECTS': 'Apache Projects', + 'TRADITIONAL_RDBMS': 'Traditional RDBMS', + 'ANALYTICAL_DATABASES': 'Analytical Databases', + 'SEARCH_NOSQL': 'Search & NoSQL', + 'QUERY_ENGINES': 'Query Engines', + 'TIME_SERIES': 'Time Series Databases', + 'OTHER': 'Other Databases', + 'OPEN_SOURCE': 'Open Source', + 'HOSTED_OPEN_SOURCE': 'Hosted Open Source', + 'PROPRIETARY': 'Proprietary', + }; + const displayName = categoryDisplayNames[cat] || cat; + if (!stats.byCategory[displayName]) { + stats.byCategory[displayName] = []; + } + stats.byCategory[displayName].push(name); + } + } + + stats.averageScore = Math.round(totalScore / stats.totalDatabases); + + return stats; +} + +/** + * Convert database name to a URL-friendly slug + */ +function toSlug(name) { + return name + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-|-$/g, ''); +} + +/** + * Generate MDX content for a single database page + */ +function generateDatabaseMDX(name, db) { + const description = db.documentation?.description || `Documentation for ${name} database connection.`; + const shortDesc = description + .slice(0, 160) + .replace(/\\/g, '\\\\') + .replace(/"/g, '\\"'); + + return `--- +title: ${name} +sidebar_label: ${name} +description: "${shortDesc}" +hide_title: true +--- + +{/* +Licensed to the Apache Software Foundation (ASF) under one +or more 
contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +*/} + +import { DatabasePage } from '@site/src/components/databases'; +import databaseData from '@site/src/data/databases.json'; + + +`; +} + +/** + * Generate the index MDX for the databases overview + */ +function generateIndexMDX(statistics, usedFlaskContext = true) { + const fallbackNotice = usedFlaskContext ? '' : ` +:::info Developer Note +This documentation was built without Flask context, so feature diagnostics (scores, time grain support, etc.) +may not reflect actual database capabilities. For full diagnostics, build docs locally with: + +\`\`\`bash +cd docs && npm run gen-db-docs +\`\`\` + +This requires a working Superset development environment. +::: + +`; + + return `--- +title: Connecting to Databases +sidebar_label: Overview +sidebar_position: 1 +--- + +{/* +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +*/} + +import { DatabaseIndex } from '@site/src/components/databases'; +import databaseData from '@site/src/data/databases.json'; + +# Connecting to Databases + +Superset does not ship bundled with connectivity to databases. The main step in connecting +Superset to a database is to **install the proper database driver(s)** in your environment. + +:::note +You'll need to install the required packages for the database you want to use as your metadata database +as well as the packages needed to connect to the databases you want to access through Superset. +For information about setting up Superset's metadata database, please refer to +installation documentations ([Docker Compose](/docs/installation/docker-compose), [Kubernetes](/docs/installation/kubernetes)) +::: + +## Supported Databases + +Superset supports **${statistics.totalDatabases} databases** with varying levels of feature support. +Click on any database name to see detailed documentation including connection strings, +authentication methods, and configuration options. 
+ + + +## Installing Database Drivers + +Superset requires a Python [DB-API database driver](https://peps.python.org/pep-0249/) +and a [SQLAlchemy dialect](https://docs.sqlalchemy.org/en/20/dialects/) to be installed for +each database engine you want to connect to. + +### Installing Drivers in Docker + +For Docker deployments, create a \`requirements-local.txt\` file in the \`docker\` directory: + +\`\`\`bash +# Create the requirements file +touch ./docker/requirements-local.txt + +# Add your driver (e.g., for PostgreSQL) +echo "psycopg2-binary" >> ./docker/requirements-local.txt +\`\`\` + +Then restart your containers. The drivers will be installed automatically. + +### Installing Drivers with pip + +For non-Docker installations: + +\`\`\`bash +pip install +\`\`\` + +See individual database pages for the specific driver packages needed. + +## Connecting Through the UI + +1. Go to **Settings → Data: Database Connections** +2. Click **+ DATABASE** +3. Select your database type or enter a SQLAlchemy URI +4. Click **Test Connection** to verify +5. Click **Connect** to save + +## Contributing + +To add or update database documentation, add a \`metadata\` attribute to your engine spec class in +\`superset/db_engine_specs/\`. Documentation is auto-generated from these metadata attributes. + +See [METADATA_STATUS.md](https://github.com/apache/superset/blob/master/superset/db_engine_specs/METADATA_STATUS.md) +for the current status of database documentation and the [README](https://github.com/apache/superset/blob/master/superset/db_engine_specs/README.md) for the metadata schema. +${fallbackNotice}`; +} + +const README_PATH = path.join(ROOT_DIR, 'README.md'); +const README_START_MARKER = ''; +const README_END_MARKER = ''; + +/** + * Generate the database logos HTML for README.md + * Only includes databases that have logos defined + */ +function generateReadmeLogos(databases) { + // Get databases with logos, sorted alphabetically + const dbsWithLogos = Object.entries(databases) + .filter(([, db]) => db.documentation?.logo) + .sort(([a], [b]) => a.localeCompare(b)); + + if (dbsWithLogos.length === 0) { + return ''; + } + + // Generate HTML img tags + const logoTags = dbsWithLogos.map(([name, db]) => { + const logo = db.documentation.logo; + const alt = name.toLowerCase().replace(/\s+/g, '-'); + // Use docs site URL for logos + return ` `; + }); + + return `

+${logoTags.join('\n')} +

`; +} + +/** + * Update the README.md with generated database logos + */ +function updateReadme(databases) { + if (!fs.existsSync(README_PATH)) { + console.log('README.md not found, skipping update'); + return false; + } + + const content = fs.readFileSync(README_PATH, 'utf-8'); + + // Check if markers exist + if (!content.includes(README_START_MARKER) || !content.includes(README_END_MARKER)) { + console.log('README.md missing database markers, skipping update'); + console.log(` Add ${README_START_MARKER} and ${README_END_MARKER} to enable auto-generation`); + return false; + } + + // Generate new logos section + const logosHtml = generateReadmeLogos(databases); + + // Replace content between markers + const pattern = new RegExp( + `${README_START_MARKER}[\\s\\S]*?${README_END_MARKER}`, + 'g' + ); + const newContent = content.replace( + pattern, + `${README_START_MARKER}\n${logosHtml}\n${README_END_MARKER}` + ); + + if (newContent !== content) { + fs.writeFileSync(README_PATH, newContent); + console.log('Updated README.md database logos'); + return true; + } + + console.log('README.md database logos unchanged'); + return false; +} + +/** + * Load existing database data if available + */ +function loadExistingData() { + if (!fs.existsSync(DATA_OUTPUT_FILE)) { + return null; + } + + try { + const content = fs.readFileSync(DATA_OUTPUT_FILE, 'utf-8'); + return JSON.parse(content); + } catch (error) { + console.log('Could not load existing data:', error.message); + return null; + } +} + +/** + * Merge new documentation with existing diagnostics + * Preserves score, time_grains, and feature flags from existing data + */ +function mergeWithExistingDiagnostics(newDatabases, existingData) { + if (!existingData?.databases) return newDatabases; + + const diagnosticFields = [ + 'score', 'max_score', 'time_grains', 'joins', 'subqueries', + 'supports_dynamic_schema', 'supports_catalog', 'supports_dynamic_catalog', + 'ssh_tunneling', 'query_cancelation', 'supports_file_upload', + 'user_impersonation', 'query_cost_estimation', 'sql_validation' + ]; + + for (const [name, db] of Object.entries(newDatabases)) { + const existingDb = existingData.databases[name]; + if (existingDb && existingDb.score > 0) { + // Preserve diagnostics from existing data + for (const field of diagnosticFields) { + if (existingDb[field] !== undefined) { + db[field] = existingDb[field]; + } + } + } + } + + const preserved = Object.values(newDatabases).filter(d => d.score > 0).length; + if (preserved > 0) { + console.log(`Preserved diagnostics for ${preserved} databases from existing data`); + } + + return newDatabases; +} + +/** + * Main function + */ +async function main() { + console.log('Generating database documentation...\n'); + + // Ensure output directories exist + if (!fs.existsSync(DATA_OUTPUT_DIR)) { + fs.mkdirSync(DATA_OUTPUT_DIR, { recursive: true }); + } + if (!fs.existsSync(MDX_OUTPUT_DIR)) { + fs.mkdirSync(MDX_OUTPUT_DIR, { recursive: true }); + } + + // Load existing data for potential merge + const existingData = loadExistingData(); + + // Try sources in order of preference: + // 1. Full script with Flask context (richest data with diagnostics) + // 2. 
Engine spec metadata files (works in CI without Flask) + let databases = tryRunFullScript(); + let usedFlaskContext = !!databases; + + if (!databases) { + // Extract from engine spec metadata (preferred for CI) + databases = extractEngineSpecMetadata(); + } + + if (!databases || Object.keys(databases).length === 0) { + console.error('Failed to generate database documentation data.'); + console.error('Could not extract from Flask app or engine spec metadata.'); + process.exit(1); + } + + console.log(`Processed ${Object.keys(databases).length} databases\n`); + + // Check if new data has scores; if not, preserve existing diagnostics + const hasNewScores = Object.values(databases).some((db) => db.score > 0); + if (!hasNewScores && existingData) { + databases = mergeWithExistingDiagnostics(databases, existingData); + } + + // Build statistics + const statistics = buildStatistics(databases); + + // Create the final output structure + const output = { + generated: new Date().toISOString(), + statistics, + databases, + }; + + // Write the JSON file (with trailing newline for POSIX compliance) + fs.writeFileSync(DATA_OUTPUT_FILE, JSON.stringify(output, null, 2) + '\n'); + console.log(`Generated: ${path.relative(DOCS_DIR, DATA_OUTPUT_FILE)}`); + + + // Ensure supported directory exists + if (!fs.existsSync(MDX_SUPPORTED_DIR)) { + fs.mkdirSync(MDX_SUPPORTED_DIR, { recursive: true }); + } + + // Clean up old MDX files that are no longer in the database list + console.log(`\nCleaning up old MDX files in ${path.relative(DOCS_DIR, MDX_SUPPORTED_DIR)}/`); + const existingMdxFiles = fs.readdirSync(MDX_SUPPORTED_DIR).filter(f => f.endsWith('.mdx')); + const validSlugs = new Set(Object.keys(databases).map(name => `${toSlug(name)}.mdx`)); + let removedCount = 0; + for (const file of existingMdxFiles) { + if (!validSlugs.has(file)) { + fs.unlinkSync(path.join(MDX_SUPPORTED_DIR, file)); + removedCount++; + } + } + if (removedCount > 0) { + console.log(` Removed ${removedCount} outdated MDX files`); + } + + // Generate individual MDX files for each database in supported/ subdirectory + console.log(`\nGenerating MDX files in ${path.relative(DOCS_DIR, MDX_SUPPORTED_DIR)}/`); + + let mdxCount = 0; + for (const [name, db] of Object.entries(databases)) { + const slug = toSlug(name); + const mdxContent = generateDatabaseMDX(name, db); + const mdxPath = path.join(MDX_SUPPORTED_DIR, `${slug}.mdx`); + fs.writeFileSync(mdxPath, mdxContent); + mdxCount++; + } + console.log(` Generated ${mdxCount} database pages`); + + // Generate index page in parent databases/ directory + const indexContent = generateIndexMDX(statistics, usedFlaskContext); + const indexPath = path.join(MDX_OUTPUT_DIR, 'index.mdx'); + fs.writeFileSync(indexPath, indexContent); + console.log(` Generated index page`); + + // Generate _category_.json for databases/ directory + const categoryJson = { + label: 'Databases', + position: 1, + link: { + type: 'doc', + id: 'databases/index', + }, + }; + fs.writeFileSync( + path.join(MDX_OUTPUT_DIR, '_category_.json'), + JSON.stringify(categoryJson, null, 2) + '\n' + ); + + // Generate _category_.json for supported/ subdirectory (collapsible) + const supportedCategoryJson = { + label: 'Supported Databases', + position: 2, + collapsed: true, + collapsible: true, + }; + fs.writeFileSync( + path.join(MDX_SUPPORTED_DIR, '_category_.json'), + JSON.stringify(supportedCategoryJson, null, 2) + '\n' + ); + console.log(` Generated _category_.json files`); + + // Update README.md database logos (only when explicitly requested) 
+ if (process.env.UPDATE_README === 'true' || process.argv.includes('--update-readme')) { + console.log(''); + updateReadme(databases); + } + + console.log(`\nStatistics:`); + console.log(` Total databases: ${statistics.totalDatabases}`); + console.log(` With documentation: ${statistics.withDocumentation}`); + console.log(` With connection strings: ${statistics.withConnectionString}`); + console.log(` Categories: ${Object.keys(statistics.byCategory).length}`); + + console.log('\nDone!'); +} + +main().catch(console.error); diff --git a/docs/sidebars.js b/docs/sidebars.js index 4f231d16a599..8a808ed31b61 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -57,6 +57,20 @@ const sidebars = { }, ], }, + { + type: 'category', + label: 'Databases', + link: { + type: 'doc', + id: 'databases/index', + }, + items: [ + { + type: 'autogenerated', + dirName: 'databases', + }, + ], + }, { type: 'category', label: 'Using Superset', diff --git a/docs/src/components/SectionHeader.tsx b/docs/src/components/SectionHeader.tsx index 387d021249b0..d0cc50e6cda7 100644 --- a/docs/src/components/SectionHeader.tsx +++ b/docs/src/components/SectionHeader.tsx @@ -98,6 +98,7 @@ interface SectionHeaderProps { title: string; subtitle?: string | ReactNode; dark?: boolean; + link?: string; } const SectionHeader = ({ @@ -105,15 +106,24 @@ const SectionHeader = ({ title, subtitle, dark, + link, }: SectionHeaderProps) => { const Heading = level; const StyledRoot = level === 'h1' ? StyledSectionHeaderH1 : StyledSectionHeaderH2; + const titleContent = link ? ( + + {title} + + ) : ( + title + ); + return ( - {title} + {titleContent} line {subtitle &&
{subtitle}
}
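For reference, a minimal sketch of how the new optional `link` prop on SectionHeader could be used from a docs page; the import path, page component, and `/docs/databases` route below are illustrative assumptions rather than part of this patch:

```tsx
// Illustrative usage of SectionHeader's new optional `link` prop.
// Assumptions: the alias import path and the /docs/databases route are
// examples only; `level`, `title`, and `subtitle` behave as before.
import React from 'react';
import SectionHeader from '@site/src/components/SectionHeader';

export default function DatabasesTeaser() {
  return (
    <SectionHeader
      level="h2"
      title="Supported Databases"
      subtitle="Connection guides generated from engine spec metadata"
      // When `link` is provided, the title renders as a link to that URL;
      // omitting it keeps the previous plain-title behavior.
      link="/docs/databases"
    />
  );
}
```
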
diff --git a/docs/src/components/databases/DatabaseIndex.tsx b/docs/src/components/databases/DatabaseIndex.tsx new file mode 100644 index 000000000000..89eee4782dc0 --- /dev/null +++ b/docs/src/components/databases/DatabaseIndex.tsx @@ -0,0 +1,578 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import React, { useState, useMemo } from 'react'; +import { Card, Row, Col, Statistic, Table, Tag, Input, Select, Tooltip } from 'antd'; +import { + DatabaseOutlined, + CheckCircleOutlined, + ApiOutlined, + KeyOutlined, + SearchOutlined, + LinkOutlined, +} from '@ant-design/icons'; +import type { DatabaseData, DatabaseInfo, TimeGrains } from './types'; + +interface DatabaseIndexProps { + data: DatabaseData; +} + +// Type for table entries (includes both regular DBs and compatible DBs) +interface TableEntry { + name: string; + categories: string[]; // Multiple categories supported + score: number; + max_score: number; + timeGrainCount: number; + time_grains?: TimeGrains; + hasDrivers: boolean; + hasAuthMethods: boolean; + hasConnectionString: boolean; + joins?: boolean; + subqueries?: boolean; + supports_dynamic_schema?: boolean; + supports_catalog?: boolean; + ssh_tunneling?: boolean; + supports_file_upload?: boolean; + query_cancelation?: boolean; + query_cost_estimation?: boolean; + user_impersonation?: boolean; + sql_validation?: boolean; + documentation?: DatabaseInfo['documentation']; + // For compatible databases + isCompatible?: boolean; + compatibleWith?: string; + compatibleDescription?: string; +} + +// Map category constant names to display names +const CATEGORY_DISPLAY_NAMES: Record = { + 'CLOUD_AWS': 'Cloud - AWS', + 'CLOUD_GCP': 'Cloud - Google', + 'CLOUD_AZURE': 'Cloud - Azure', + 'CLOUD_DATA_WAREHOUSES': 'Cloud Data Warehouses', + 'APACHE_PROJECTS': 'Apache Projects', + 'TRADITIONAL_RDBMS': 'Traditional RDBMS', + 'ANALYTICAL_DATABASES': 'Analytical Databases', + 'SEARCH_NOSQL': 'Search & NoSQL', + 'QUERY_ENGINES': 'Query Engines', + 'TIME_SERIES': 'Time Series Databases', + 'OTHER': 'Other Databases', + 'OPEN_SOURCE': 'Open Source', + 'HOSTED_OPEN_SOURCE': 'Hosted Open Source', + 'PROPRIETARY': 'Proprietary', +}; + +// Category colors for visual distinction +const CATEGORY_COLORS: Record = { + 'Cloud - AWS': 'orange', + 'Cloud - Google': 'blue', + 'Cloud - Azure': 'cyan', + 'Cloud Data Warehouses': 'purple', + 'Apache Projects': 'red', + 'Traditional RDBMS': 'green', + 'Analytical Databases': 'magenta', + 'Search & NoSQL': 'gold', + 'Query Engines': 'lime', + 'Time Series Databases': 'volcano', + 'Other Databases': 'default', + // Licensing categories + 'Open Source': 'geekblue', + 'Hosted Open Source': 'cyan', + 'Proprietary': 'default', +}; + +// Convert category constant to display name +function 
getCategoryDisplayName(cat: string): string { + return CATEGORY_DISPLAY_NAMES[cat] || cat; +} + +// Get categories for a database - uses categories from metadata when available +// Falls back to name-based inference for compatible databases without categories +function getCategories( + name: string, + documentationCategories?: string[] +): string[] { + // Prefer categories from documentation metadata (computed by Python) + if (documentationCategories && documentationCategories.length > 0) { + return documentationCategories.map(getCategoryDisplayName); + } + + // Fallback: infer from name (for compatible databases without categories) + const nameLower = name.toLowerCase(); + + if (nameLower.includes('aws') || nameLower.includes('amazon')) + return ['Cloud - AWS']; + if (nameLower.includes('google') || nameLower.includes('bigquery')) + return ['Cloud - Google']; + if (nameLower.includes('azure') || nameLower.includes('microsoft')) + return ['Cloud - Azure']; + if (nameLower.includes('snowflake') || nameLower.includes('databricks')) + return ['Cloud Data Warehouses']; + if ( + nameLower.includes('apache') || + nameLower.includes('druid') || + nameLower.includes('hive') || + nameLower.includes('spark') + ) + return ['Apache Projects']; + if ( + nameLower.includes('postgres') || + nameLower.includes('mysql') || + nameLower.includes('sqlite') || + nameLower.includes('mariadb') + ) + return ['Traditional RDBMS']; + if ( + nameLower.includes('clickhouse') || + nameLower.includes('vertica') || + nameLower.includes('starrocks') + ) + return ['Analytical Databases']; + if ( + nameLower.includes('elastic') || + nameLower.includes('solr') || + nameLower.includes('couchbase') + ) + return ['Search & NoSQL']; + if (nameLower.includes('trino') || nameLower.includes('presto')) + return ['Query Engines']; + + return ['Other Databases']; +} + +// Count supported time grains +function countTimeGrains(db: DatabaseInfo): number { + if (!db.time_grains) return 0; + return Object.values(db.time_grains).filter(Boolean).length; +} + +// Format time grain name for display (e.g., FIVE_MINUTES -> "5 min") +function formatTimeGrain(grain: string): string { + const mapping: Record = { + SECOND: 'Second', + FIVE_SECONDS: '5 sec', + THIRTY_SECONDS: '30 sec', + MINUTE: 'Minute', + FIVE_MINUTES: '5 min', + TEN_MINUTES: '10 min', + FIFTEEN_MINUTES: '15 min', + THIRTY_MINUTES: '30 min', + HALF_HOUR: '30 min', + HOUR: 'Hour', + SIX_HOURS: '6 hours', + DAY: 'Day', + WEEK: 'Week', + WEEK_STARTING_SUNDAY: 'Week (Sun)', + WEEK_STARTING_MONDAY: 'Week (Mon)', + WEEK_ENDING_SATURDAY: 'Week (→Sat)', + WEEK_ENDING_SUNDAY: 'Week (→Sun)', + MONTH: 'Month', + QUARTER: 'Quarter', + QUARTER_YEAR: 'Quarter', + YEAR: 'Year', + }; + return mapping[grain] || grain; +} + +// Get list of supported time grains for tooltip +function getSupportedTimeGrains(timeGrains?: TimeGrains): string[] { + if (!timeGrains) return []; + return Object.entries(timeGrains) + .filter(([, supported]) => supported) + .map(([grain]) => formatTimeGrain(grain)); +} + +const DatabaseIndex: React.FC = ({ data }) => { + const [searchText, setSearchText] = useState(''); + const [categoryFilter, setCategoryFilter] = useState(null); + + const { statistics, databases } = data; + + // Convert databases object to array, including compatible databases + const databaseList = useMemo(() => { + const entries: TableEntry[] = []; + + Object.entries(databases).forEach(([name, db]) => { + // Add the main database + // Use categories from documentation metadata (computed by Python) when 
available + entries.push({ + ...db, + name, + categories: getCategories(name, db.documentation?.categories), + timeGrainCount: countTimeGrains(db), + hasDrivers: (db.documentation?.drivers?.length ?? 0) > 0, + hasAuthMethods: (db.documentation?.authentication_methods?.length ?? 0) > 0, + hasConnectionString: Boolean( + db.documentation?.connection_string || + (db.documentation?.drivers?.length ?? 0) > 0 + ), + isCompatible: false, + }); + + // Add compatible databases from this database's documentation + const compatibleDbs = db.documentation?.compatible_databases ?? []; + compatibleDbs.forEach((compat) => { + // Check if this compatible DB already exists as a main entry + const existsAsMain = Object.keys(databases).some( + (dbName) => dbName.toLowerCase() === compat.name.toLowerCase() + ); + + if (!existsAsMain) { + // Compatible databases: use their categories if defined, or infer from name + entries.push({ + name: compat.name, + categories: getCategories(compat.name, compat.categories), + // Compatible DBs inherit scores from parent + score: db.score, + max_score: db.max_score, + timeGrainCount: countTimeGrains(db), + hasDrivers: false, + hasAuthMethods: false, + hasConnectionString: Boolean(compat.connection_string), + joins: db.joins, + subqueries: db.subqueries, + supports_dynamic_schema: db.supports_dynamic_schema, + supports_catalog: db.supports_catalog, + ssh_tunneling: db.ssh_tunneling, + documentation: { + description: compat.description, + connection_string: compat.connection_string, + pypi_packages: compat.pypi_packages, + }, + isCompatible: true, + compatibleWith: name, + compatibleDescription: `Uses ${name} driver`, + }); + } + }); + }); + + return entries; + }, [databases]); + + // Filter and sort databases + const filteredDatabases = useMemo(() => { + return databaseList + .filter((db) => { + const matchesSearch = + !searchText || + db.name.toLowerCase().includes(searchText.toLowerCase()) || + db.documentation?.description + ?.toLowerCase() + .includes(searchText.toLowerCase()); + const matchesCategory = !categoryFilter || db.categories.includes(categoryFilter); + return matchesSearch && matchesCategory; + }) + .sort((a, b) => b.score - a.score); + }, [databaseList, searchText, categoryFilter]); + + // Get unique categories and counts for filter + const { categories, categoryCounts } = useMemo(() => { + const counts: Record = {}; + databaseList.forEach((db) => { + // Count each category the database belongs to + db.categories.forEach((cat) => { + counts[cat] = (counts[cat] || 0) + 1; + }); + }); + return { + categories: Object.keys(counts).sort(), + categoryCounts: counts, + }; + }, [databaseList]); + + // Table columns + const columns = [ + { + title: 'Database', + dataIndex: 'name', + key: 'name', + sorter: (a: TableEntry, b: TableEntry) => a.name.localeCompare(b.name), + render: (name: string, record: TableEntry) => { + // Convert name to URL slug + const toSlug = (n: string) => n.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-|-$/g, ''); + + // Link to parent for compatible DBs, otherwise to own page + const linkTarget = record.isCompatible && record.compatibleWith + ? `/docs/databases/supported/${toSlug(record.compatibleWith)}` + : `/docs/databases/supported/${toSlug(name)}`; + + return ( +
+ + {name} + + {record.isCompatible && record.compatibleWith && ( + } + color="geekblue" + style={{ marginLeft: 8, fontSize: '11px' }} + > + {record.compatibleWith} compatible + + )} +
+ {record.documentation?.description?.slice(0, 80)} + {(record.documentation?.description?.length ?? 0) > 80 ? '...' : ''} +
+
+ ); + }, + }, + { + title: 'Categories', + dataIndex: 'categories', + key: 'categories', + width: 220, + filters: categories.map((cat) => ({ text: cat, value: cat })), + onFilter: (value: React.Key | boolean, record: TableEntry) => + record.categories.includes(value as string), + render: (cats: string[]) => ( +
+ {cats.map((cat) => ( + {cat} + ))} +
+ ), + }, + { + title: 'Score', + dataIndex: 'score', + key: 'score', + width: 80, + sorter: (a: TableEntry, b: TableEntry) => a.score - b.score, + defaultSortOrder: 'descend' as const, + render: (score: number, record: TableEntry) => ( + 150 ? '#52c41a' : score > 100 ? '#1890ff' : '#666', + fontWeight: score > 150 ? 'bold' : 'normal', + }} + > + {score}/{record.max_score} + + ), + }, + { + title: 'Time Grains', + dataIndex: 'timeGrainCount', + key: 'timeGrainCount', + width: 100, + sorter: (a: TableEntry, b: TableEntry) => a.timeGrainCount - b.timeGrainCount, + render: (count: number, record: TableEntry) => { + if (count === 0) return -; + const grains = getSupportedTimeGrains(record.time_grains); + return ( + + {grains.map((grain) => ( + {grain} + ))} + + } + placement="top" + > + + {count} grains + + + ); + }, + }, + { + title: 'Features', + key: 'features', + width: 280, + filters: [ + { text: 'JOINs', value: 'joins' }, + { text: 'Subqueries', value: 'subqueries' }, + { text: 'Dynamic Schema', value: 'dynamic_schema' }, + { text: 'Catalog', value: 'catalog' }, + { text: 'SSH Tunneling', value: 'ssh' }, + { text: 'File Upload', value: 'file_upload' }, + { text: 'Query Cancel', value: 'query_cancel' }, + { text: 'Cost Estimation', value: 'cost_estimation' }, + { text: 'User Impersonation', value: 'impersonation' }, + { text: 'SQL Validation', value: 'sql_validation' }, + ], + onFilter: (value: React.Key | boolean, record: TableEntry) => { + switch (value) { + case 'joins': + return Boolean(record.joins); + case 'subqueries': + return Boolean(record.subqueries); + case 'dynamic_schema': + return Boolean(record.supports_dynamic_schema); + case 'catalog': + return Boolean(record.supports_catalog); + case 'ssh': + return Boolean(record.ssh_tunneling); + case 'file_upload': + return Boolean(record.supports_file_upload); + case 'query_cancel': + return Boolean(record.query_cancelation); + case 'cost_estimation': + return Boolean(record.query_cost_estimation); + case 'impersonation': + return Boolean(record.user_impersonation); + case 'sql_validation': + return Boolean(record.sql_validation); + default: + return true; + } + }, + render: (_: unknown, record: TableEntry) => ( +
+ {record.joins && JOINs} + {record.subqueries && Subqueries} + {record.supports_dynamic_schema && Dynamic Schema} + {record.supports_catalog && Catalog} + {record.ssh_tunneling && SSH} + {record.supports_file_upload && File Upload} + {record.query_cancelation && Query Cancel} + {record.query_cost_estimation && Cost Est.} + {record.user_impersonation && Impersonation} + {record.sql_validation && SQL Validation} +
+ ), + }, + { + title: 'Documentation', + key: 'docs', + width: 150, + render: (_: unknown, record: TableEntry) => ( +
+ {record.hasConnectionString && ( + } color="default"> + Connection + + )} + {record.hasDrivers && ( + } color="default"> + Drivers + + )} + {record.hasAuthMethods && ( + } color="default"> + Auth + + )} +
+ ), + }, + ]; + + return ( +
+ {/* Statistics Cards */} + + + + } + /> + + + + + } + suffix={`/ ${statistics.totalDatabases}`} + /> + + + + + } + /> + + + + + } + /> + + + + + {/* Filters */} + + + } + value={searchText} + onChange={(e) => setSearchText(e.target.value)} + allowClear + /> + + +