diff --git a/RESOURCES/INTHEWILD.yaml b/RESOURCES/INTHEWILD.yaml index 0627c0bf488c..f68a9029efc2 100644 --- a/RESOURCES/INTHEWILD.yaml +++ b/RESOURCES/INTHEWILD.yaml @@ -136,10 +136,6 @@ categories: url: https://www.dropit.shop/ contributors: ["@dropit-dev"] - - name: Fanatics - url: https://www.fanatics.com/ - contributors: ["@coderfender"] - - name: Fordeal url: https://www.fordeal.com contributors: ["@Renkai"] @@ -622,6 +618,20 @@ categories: - name: Stockarea url: https://stockarea.io + Sports: + - name: Club 25 de Agosto (Femenino / Women's Team) + url: https://www.instagram.com/25deagosto.basketfemenino/ + contributors: [ "@lion90" ] + logo: club25deagosto.svg + + - name: Fanatics + url: https://www.fanatics.com/ + contributors: [ "@coderfender" ] + + - name: komoot + url: https://www.komoot.com/ + contributors: [ "@christophlingg" ] + Others: - name: 10Web url: https://10web.io/ @@ -657,10 +667,6 @@ categories: url: https://www.increff.com/ contributors: ["@ishansinghania"] - - name: komoot - url: https://www.komoot.com/ - contributors: ["@christophlingg"] - - name: Let's Roam url: https://www.letsroam.com/ diff --git a/UPDATING.md b/UPDATING.md index e3490b87b281..89418e4102cc 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -163,6 +163,28 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides. - [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments. ### Breaking Changes +- [37370](https://github.com/apache/superset/pull/37370): The `APP_NAME` configuration variable no longer controls the browser window/tab title or other frontend branding. Application names should now be configured using the theme system with the `brandAppName` token. The `APP_NAME` config is still used for backend contexts (MCP service, logs, etc.) and serves as a fallback if `brandAppName` is not set. + - **Migration:** + ```python + # Before (Superset 5.x) + APP_NAME = "My Custom App" + + # After (Superset 6.x) - Option 1: Use theme system (recommended) + THEME_DEFAULT = { + "token": { + "brandAppName": "My Custom App", # Window titles + "brandLogoAlt": "My Custom App", # Logo alt text + "brandLogoUrl": "/static/assets/images/custom_logo.png" + } + } + + # After (Superset 6.x) - Option 2: Temporary fallback + # Keep APP_NAME for now (will be used as fallback for brandAppName) + APP_NAME = "My Custom App" + # But you should migrate to THEME_DEFAULT.token.brandAppName + ``` + - **Note:** For dark mode, set the same tokens in `THEME_DARK` configuration. + - [36317](https://github.com/apache/superset/pull/36317): The `CUSTOM_FONT_URLS` configuration option has been removed. Use the new per-theme `fontUrls` token in `THEME_DEFAULT` or database-managed themes instead. - **Before:** ```python @@ -177,7 +199,7 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides. "fontUrls": [ "https://fonts.example.com/myfont.css", ], - # ... other tokens + # ... other tokens } } ``` diff --git a/docs/scripts/extract_custom_errors.py b/docs/scripts/extract_custom_errors.py new file mode 100644 index 000000000000..35aee1cdbc2b --- /dev/null +++ b/docs/scripts/extract_custom_errors.py @@ -0,0 +1,296 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Extract custom_errors from database engine specs for documentation. + +This script parses engine spec files to extract error handling information +that can be displayed on database documentation pages. + +Usage: python scripts/extract_custom_errors.py +Output: JSON mapping of engine spec module names to their custom errors +""" + +import ast +import json # noqa: TID251 - standalone docs script, not part of superset +import sys +from pathlib import Path +from typing import Any + +# Map SupersetErrorType values to human-readable categories and issue codes +ERROR_TYPE_INFO = { + "CONNECTION_INVALID_USERNAME_ERROR": { + "category": "Authentication", + "description": "Invalid username", + "issue_codes": [1012], + }, + "CONNECTION_INVALID_PASSWORD_ERROR": { + "category": "Authentication", + "description": "Invalid password", + "issue_codes": [1013], + }, + "CONNECTION_ACCESS_DENIED_ERROR": { + "category": "Authentication", + "description": "Access denied", + "issue_codes": [1014, 1015], + }, + "CONNECTION_INVALID_HOSTNAME_ERROR": { + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [1007], + }, + "CONNECTION_PORT_CLOSED_ERROR": { + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [1008], + }, + "CONNECTION_HOST_DOWN_ERROR": { + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [1009], + }, + "CONNECTION_UNKNOWN_DATABASE_ERROR": { + "category": "Connection", + "description": "Unknown database", + "issue_codes": [1015], + }, + "CONNECTION_DATABASE_PERMISSIONS_ERROR": { + "category": "Permissions", + "description": "Insufficient permissions", + "issue_codes": [1017], + }, + "CONNECTION_MISSING_PARAMETERS_ERROR": { + "category": "Configuration", + "description": "Missing parameters", + "issue_codes": [1018], + }, + "CONNECTION_DATABASE_TIMEOUT": { + "category": "Connection", + "description": "Connection timeout", + "issue_codes": [1001, 1009], + }, + "COLUMN_DOES_NOT_EXIST_ERROR": { + "category": "Query", + "description": "Column not found", + "issue_codes": [1003, 1004], + }, + "TABLE_DOES_NOT_EXIST_ERROR": { + "category": "Query", + "description": "Table not found", + "issue_codes": [1003, 1005], + }, + "SCHEMA_DOES_NOT_EXIST_ERROR": { + "category": "Query", + "description": "Schema not found", + "issue_codes": [1003, 1016], + }, + "SYNTAX_ERROR": { + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [1030], + }, + "OBJECT_DOES_NOT_EXIST_ERROR": { + "category": "Query", + "description": "Object not found", + "issue_codes": [1029], + }, + "GENERIC_DB_ENGINE_ERROR": { + "category": "General", + "description": "Database engine error", + "issue_codes": [1002], + }, +} + + +def extract_string_from_call(node: ast.Call) -> str | None: + """Extract string from __() or _() translation calls.""" + if not node.args: + return None + arg = node.args[0] + if isinstance(arg, ast.Constant) and isinstance(arg.value, 
str): + return arg.value + elif isinstance(arg, ast.JoinedStr): + # f-string - try to reconstruct + parts = [] + for value in arg.values: + if isinstance(value, ast.Constant): + parts.append(str(value.value)) + elif isinstance(value, ast.FormattedValue): + # Just use a placeholder + parts.append("{...}") + return "".join(parts) + return None + + +def extract_custom_errors_from_file(filepath: Path) -> dict[str, list[dict[str, Any]]]: + """ + Extract custom_errors definitions from a Python engine spec file. + + Returns a dict mapping class names to their custom errors list. + """ + results = {} + + try: + with open(filepath, "r", encoding="utf-8") as f: + source = f.read() + + tree = ast.parse(source) + + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + class_name = node.name + + for item in node.body: + # Look for custom_errors = { ... } + if ( + isinstance(item, ast.AnnAssign) + and isinstance(item.target, ast.Name) + and item.target.id == "custom_errors" + and isinstance(item.value, ast.Dict) + ): + errors = extract_errors_from_dict(item.value, source) + if errors: + results[class_name] = errors + + # Also handle simple assignment: custom_errors = { ... } + elif ( + isinstance(item, ast.Assign) + and len(item.targets) == 1 + and isinstance(item.targets[0], ast.Name) + and item.targets[0].id == "custom_errors" + and isinstance(item.value, ast.Dict) + ): + errors = extract_errors_from_dict(item.value, source) + if errors: + results[class_name] = errors + + except (OSError, SyntaxError, ValueError) as e: + print(f"Error parsing {filepath}: {e}", file=sys.stderr) + + return results + + +def extract_regex_info(key: ast.expr) -> dict[str, Any]: + """Extract regex pattern info from the dict key.""" + if isinstance(key, ast.Name): + return {"regex_name": key.id} + if isinstance(key, ast.Call): + if ( + isinstance(key.func, ast.Attribute) + and key.func.attr == "compile" + and key.args + and isinstance(key.args[0], ast.Constant) + ): + return {"regex_pattern": key.args[0].value} + return {} + + +def extract_invalid_fields(extra_node: ast.Dict) -> list[str]: + """Extract invalid fields from the extra dict.""" + for k, v in zip(extra_node.keys, extra_node.values, strict=False): + if ( + isinstance(k, ast.Constant) + and k.value == "invalid" + and isinstance(v, ast.List) + ): + return [elem.value for elem in v.elts if isinstance(elem, ast.Constant)] + return [] + + +def extract_error_tuple_info(value: ast.Tuple) -> dict[str, Any]: + """Extract error info from the (message, error_type, extra) tuple.""" + result: dict[str, Any] = {} + + # First element: message template + msg_node = value.elts[0] + if isinstance(msg_node, ast.Call): + message = extract_string_from_call(msg_node) + if message: + result["message_template"] = message + elif isinstance(msg_node, ast.Constant): + result["message_template"] = msg_node.value + + # Second element: SupersetErrorType.SOMETHING + type_node = value.elts[1] + if isinstance(type_node, ast.Attribute): + error_type = type_node.attr + result["error_type"] = error_type + if error_type in ERROR_TYPE_INFO: + type_info = ERROR_TYPE_INFO[error_type] + result["category"] = type_info["category"] + result["description"] = type_info["description"] + result["issue_codes"] = type_info["issue_codes"] + + # Third element: extra dict with invalid fields + if len(value.elts) >= 3 and isinstance(value.elts[2], ast.Dict): + invalid_fields = extract_invalid_fields(value.elts[2]) + if invalid_fields: + result["invalid_fields"] = invalid_fields + + return result + + +def 
extract_errors_from_dict(dict_node: ast.Dict, source: str) -> list[dict[str, Any]]: + """Extract error information from a custom_errors dict AST node.""" + errors = [] + + for key, value in zip(dict_node.keys, dict_node.values, strict=False): + if key is None or value is None: + continue + + error_info = extract_regex_info(key) + + if isinstance(value, ast.Tuple) and len(value.elts) >= 2: + error_info.update(extract_error_tuple_info(value)) + + if error_info.get("error_type") and error_info.get("message_template"): + errors.append(error_info) + + return errors + + +def main() -> None: + """Main function to extract custom_errors from all engine specs.""" + # Find the superset root directory + script_dir = Path(__file__).parent + root_dir = script_dir.parent.parent + specs_dir = root_dir / "superset" / "db_engine_specs" + + if not specs_dir.exists(): + print(f"Error: Engine specs directory not found: {specs_dir}", file=sys.stderr) + sys.exit(1) + + all_errors = {} + + # Process each Python file in the specs directory + for filepath in sorted(specs_dir.glob("*.py")): + if filepath.name.startswith("_"): + continue + + module_name = filepath.stem + class_errors = extract_custom_errors_from_file(filepath) + + if class_errors: + # Store errors by module and class + all_errors[module_name] = class_errors + + # Output as JSON + print(json.dumps(all_errors, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/docs/scripts/generate-database-docs.mjs b/docs/scripts/generate-database-docs.mjs index cde02d127dc6..04980f24f833 100644 --- a/docs/scripts/generate-database-docs.mjs +++ b/docs/scripts/generate-database-docs.mjs @@ -675,6 +675,78 @@ function updateReadme(databases) { return false; } +/** + * Extract custom_errors from engine specs for troubleshooting documentation + * Returns a map of module names to their custom errors + */ +function extractCustomErrors() { + console.log('Extracting custom_errors from engine specs...'); + + try { + const scriptPath = path.join(__dirname, 'extract_custom_errors.py'); + const result = spawnSync('python3', [scriptPath], { + cwd: ROOT_DIR, + encoding: 'utf-8', + timeout: 30000, + maxBuffer: 10 * 1024 * 1024, + }); + + if (result.error) { + throw result.error; + } + if (result.status !== 0) { + throw new Error(result.stderr || 'Python script failed'); + } + + const customErrors = JSON.parse(result.stdout); + const moduleCount = Object.keys(customErrors).length; + const errorCount = Object.values(customErrors).reduce((sum, classes) => + sum + Object.values(classes).reduce((s, errs) => s + errs.length, 0), 0); + console.log(` Found ${errorCount} custom errors across ${moduleCount} modules`); + return customErrors; + } catch (err) { + console.log(' Could not extract custom_errors:', err.message); + return null; + } +} + +/** + * Merge custom_errors into database documentation + * Maps by module name since that's how both datasets are keyed + */ +function mergeCustomErrors(databases, customErrors) { + if (!customErrors) return; + + let mergedCount = 0; + + for (const [, db] of Object.entries(databases)) { + if (!db.module) continue; + // Normalize module name: Flask mode uses full path (superset.db_engine_specs.postgres), + // but customErrors is keyed by file stem (postgres) + const moduleName = db.module.split('.').pop(); + if (!customErrors[moduleName]) continue; + + // Get all errors from all classes in this module + const moduleErrors = customErrors[moduleName]; + const allErrors = []; + + for (const classErrors of Object.values(moduleErrors)) { + 
allErrors.push(...classErrors); + } + + if (allErrors.length > 0) { + // Add to documentation + db.documentation = db.documentation || {}; + db.documentation.custom_errors = allErrors; + mergedCount++; + } + } + + if (mergedCount > 0) { + console.log(` Merged custom_errors into ${mergedCount} database docs`); + } +} + /** * Load existing database data if available */ @@ -768,6 +840,10 @@ async function main() { databases = mergeWithExistingDiagnostics(databases, existingData); } + // Extract and merge custom_errors for troubleshooting documentation + const customErrors = extractCustomErrors(); + mergeCustomErrors(databases, customErrors); + // Build statistics const statistics = buildStatistics(databases); diff --git a/docs/src/components/databases/DatabaseIndex.tsx b/docs/src/components/databases/DatabaseIndex.tsx index 89eee4782dc0..fee4f1a36bb1 100644 --- a/docs/src/components/databases/DatabaseIndex.tsx +++ b/docs/src/components/databases/DatabaseIndex.tsx @@ -26,6 +26,7 @@ import { KeyOutlined, SearchOutlined, LinkOutlined, + BugOutlined, } from '@ant-design/icons'; import type { DatabaseData, DatabaseInfo, TimeGrains } from './types'; @@ -44,6 +45,8 @@ interface TableEntry { hasDrivers: boolean; hasAuthMethods: boolean; hasConnectionString: boolean; + hasCustomErrors: boolean; + customErrorCount: number; joins?: boolean; subqueries?: boolean; supports_dynamic_schema?: boolean; @@ -223,6 +226,8 @@ const DatabaseIndex: React.FC = ({ data }) => { db.documentation?.connection_string || (db.documentation?.drivers?.length ?? 0) > 0 ), + hasCustomErrors: (db.documentation?.custom_errors?.length ?? 0) > 0, + customErrorCount: db.documentation?.custom_errors?.length ?? 0, isCompatible: false, }); @@ -246,6 +251,8 @@ const DatabaseIndex: React.FC = ({ data }) => { hasDrivers: false, hasAuthMethods: false, hasConnectionString: Boolean(compat.connection_string), + hasCustomErrors: false, + customErrorCount: 0, joins: db.joins, subqueries: db.subqueries, supports_dynamic_schema: db.supports_dynamic_schema, @@ -457,7 +464,7 @@ const DatabaseIndex: React.FC = ({ data }) => { { title: 'Documentation', key: 'docs', - width: 150, + width: 180, render: (_: unknown, record: TableEntry) => (
{record.hasConnectionString && ( @@ -475,6 +482,13 @@ const DatabaseIndex: React.FC = ({ data }) => { Auth )} + {record.hasCustomErrors && ( + + } color="volcano"> + Errors + + + )}
), }, diff --git a/docs/src/components/databases/DatabasePage.tsx b/docs/src/components/databases/DatabasePage.tsx index 836680b74620..c02d4a44a1ca 100644 --- a/docs/src/components/databases/DatabasePage.tsx +++ b/docs/src/components/databases/DatabasePage.tsx @@ -39,6 +39,7 @@ import { BookOutlined, EditOutlined, GithubOutlined, + BugOutlined, } from '@ant-design/icons'; import type { DatabaseInfo } from './types'; @@ -414,6 +415,132 @@ const DatabasePage: React.FC = ({ database, name }) => { ); }; + // Render troubleshooting / custom errors section + const renderTroubleshooting = () => { + if (!docs?.custom_errors?.length) return null; + + // Group errors by category + const errorsByCategory: Record = {}; + for (const error of docs.custom_errors) { + const category = error.category || 'General'; + if (!errorsByCategory[category]) { + errorsByCategory[category] = []; + } + errorsByCategory[category].push(error); + } + + // Define category order for consistent display + const categoryOrder = [ + 'Authentication', + 'Connection', + 'Permissions', + 'Query', + 'Configuration', + 'General', + ]; + + const sortedCategories = Object.keys(errorsByCategory).sort((a, b) => { + const aIdx = categoryOrder.indexOf(a); + const bIdx = categoryOrder.indexOf(b); + if (aIdx === -1 && bIdx === -1) return a.localeCompare(b); + if (aIdx === -1) return 1; + if (bIdx === -1) return -1; + return aIdx - bIdx; + }); + + // Category colors + const categoryColors: Record = { + Authentication: 'orange', + Connection: 'red', + Permissions: 'purple', + Query: 'blue', + Configuration: 'cyan', + General: 'default', + }; + + return ( + + Troubleshooting + + } + style={{ marginBottom: 16 }} + > + + Common error messages you may encounter when connecting to or querying{' '} + {name}, along with their causes and solutions. + + + {sortedCategories.map((category) => ( + + + {category} + + {errorsByCategory[category].length} error + {errorsByCategory[category].length !== 1 ? 's' : ''} + + } + key={category} + > + {errorsByCategory[category].map((error, idx) => ( +
+
+ {error.description || error.error_type} +
+ + {error.invalid_fields && error.invalid_fields.length > 0 && ( +
+ Check these fields: + {error.invalid_fields.map((field) => ( + + {field} + + ))} +
+ )} + {error.issue_codes && error.issue_codes.length > 0 && ( +
+ Related issue codes: + {error.issue_codes.map((code) => ( + + + Issue {code} + + + ))} +
+ )} +
+ ))} +
+ ))} +
+
+ ); + }; + return (
= ({ database, name }) => { {/* Time Grains */} {renderTimeGrains()} + {/* Troubleshooting / Custom Errors */} + {renderTroubleshooting()} + {/* Compatible Databases */} {renderCompatibleDatabases()} diff --git a/docs/src/components/databases/types.ts b/docs/src/components/databases/types.ts index 698c93e2cca9..d1ad59e74bd7 100644 --- a/docs/src/components/databases/types.ts +++ b/docs/src/components/databases/types.ts @@ -86,6 +86,17 @@ export interface CompatibleDatabase { docs_url?: string; } +export interface CustomError { + error_type: string; // e.g., "CONNECTION_INVALID_USERNAME_ERROR" + message_template: string; // e.g., 'The username "%(username)s" does not exist.' + regex_pattern?: string; // The regex pattern that matches this error (optional, for reference) + regex_name?: string; // The name of the regex constant (e.g., "CONNECTION_INVALID_USERNAME_REGEX") + invalid_fields?: string[]; // Fields that are invalid, e.g., ["username", "password"] + issue_codes?: number[]; // Related issue codes from ISSUE_CODES mapping + category?: string; // Error category: "Authentication", "Connection", "Query", etc. + description?: string; // Human-readable short description of the error type +} + export interface DatabaseDocumentation { description?: string; logo?: string; @@ -111,6 +122,7 @@ export interface DatabaseDocumentation { sqlalchemy_docs_url?: string; advanced_features?: Record; compatible_databases?: CompatibleDatabase[]; + custom_errors?: CustomError[]; // Database-specific error messages and troubleshooting info } export interface TimeGrains { diff --git a/docs/src/data/databases.json b/docs/src/data/databases.json index 35e91f79a90a..060569414692 100644 --- a/docs/src/data/databases.json +++ b/docs/src/data/databases.json @@ -1,5 +1,5 @@ { - "generated": "2026-01-21T21:46:41.044Z", + "generated": "2026-01-21T23:22:58.314Z", "statistics": { "totalDatabases": 67, "withDocumentation": 67, @@ -333,7 +333,19 @@ } } ], - "notes": "URL-encode special characters in s3_staging_dir (e.g., s3:// becomes s3%3A//)." + "notes": "URL-encode special characters in s3_staging_dir (e.g., s3:// becomes s3%3A//).", + "custom_errors": [ + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } + ] }, "time_grains": {}, "score": 0, @@ -516,7 +528,62 @@ "warnings": [ "Google BigQuery Python SDK is not compatible with gevent. Use a worker type other than gevent when deploying with gunicorn." ], - "docs_url": "https://github.com/googleapis/python-bigquery-sqlalchemy" + "docs_url": "https://github.com/googleapis/python-bigquery-sqlalchemy", + "custom_errors": [ + { + "regex_name": "CONNECTION_DATABASE_PERMISSIONS_REGEX", + "message_template": "Unable to connect. Verify that the following roles are set on the service account: \"BigQuery Data Viewer\", \"BigQuery Metadata Viewer\", \"BigQuery Job User\" and the following permissions are set \"bigquery.readsessions.create\", \"bigquery.readsessions.getData\"", + "error_type": "CONNECTION_DATABASE_PERMISSIONS_ERROR", + "category": "Permissions", + "description": "Insufficient permissions", + "issue_codes": [ + 1017 + ] + }, + { + "regex_name": "TABLE_DOES_NOT_EXIST_REGEX", + "message_template": "The table \"%(table)s\" does not exist. 
A valid table must be used to run this query.", + "error_type": "TABLE_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Table not found", + "issue_codes": [ + 1003, + 1005 + ] + }, + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "We can't seem to resolve column \"%(column)s\" at line %(location)s.", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + }, + { + "regex_name": "SCHEMA_DOES_NOT_EXIST_REGEX", + "message_template": "The schema \"%(schema)s\" does not exist. A valid schema must be used to run this query.", + "error_type": "SCHEMA_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Schema not found", + "issue_codes": [ + 1003, + 1016 + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } + ] }, "time_grains": {}, "score": 0, @@ -1214,6 +1281,120 @@ "is_recommended": true, "notes": "Uses PostgreSQL wire protocol." } + ], + "custom_errors": [ + { + "message_template": "Incorrect username or password.", + "error_type": "CONNECTION_INVALID_USERNAME_ERROR", + "category": "Authentication", + "description": "Invalid username", + "issue_codes": [ + 1012 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "message_template": "Please enter a password.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "password" + ] + }, + { + "message_template": "Hostname \"%(hostname)s\" cannot be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ], + "invalid_fields": [ + "host" + ] + }, + { + "message_template": "Server refused the connection: check hostname and port.", + "error_type": "CONNECTION_PORT_CLOSED_ERROR", + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [ + 1008 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "message_template": "Unable to connect to database \"%(database)s\"", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + }, + { + "message_template": "Unable to connect to database \"%(database)s\": database does not exist or insufficient permissions", + "error_type": "CONNECTION_DATABASE_PERMISSIONS_ERROR", + "category": "Permissions", + "description": "Insufficient permissions", + "issue_codes": [ + 1017 + ], + "invalid_fields": [ + "database" + ] + }, + { + "message_template": "Please check your query for syntax errors at or near \"%(err)s\". 
Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + }, + { + "message_template": "Column \"%(column)s\" not found in \"%(view)s\".", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + }, + { + "message_template": "Invalid aggregation expression.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + }, + { + "message_template": "\"%(exp)s\" is neither an aggregation function nor appears in the GROUP BY clause.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } ] }, "time_grains": { @@ -1313,6 +1494,73 @@ "APACHE_PROJECTS", "ANALYTICAL_DATABASES", "OPEN_SOURCE" + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\" or the password is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "Unknown Doris server host \"%(hostname)s\".", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ], + "invalid_fields": [ + "host" + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down and can't be reached.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "Unable to connect to database \"%(database)s\".", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors near \"%(server_error)s\". 
Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } ] }, "time_grains": { @@ -1650,6 +1898,19 @@ "HOSTED_OPEN_SOURCE" ] } + ], + "custom_errors": [ + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "We can't seem to resolve the column \"%(column_name)s\"", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + } ] }, "time_grains": { @@ -1727,7 +1988,20 @@ "database": "MotherDuck database name", "token": "Service token from MotherDuck dashboard" }, - "docs_url": "https://motherduck.com/docs/getting-started/" + "docs_url": "https://motherduck.com/docs/getting-started/", + "custom_errors": [ + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "We can't seem to resolve the column \"%(column_name)s\"", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + } + ] }, "time_grains": {}, "score": 0, @@ -2159,7 +2433,19 @@ "CLOUD_GCP", "HOSTED_OPEN_SOURCE" ], - "install_instructions": "pip install \"apache-superset[gsheets]\"" + "install_instructions": "pip install \"apache-superset[gsheets]\"", + "custom_errors": [ + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors near \"%(server_error)s\". Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } + ] }, "time_grains": { "SECOND": true, @@ -2755,7 +3041,50 @@ "notes": "Connection string must be URL-encoded. Special characters like @ need encoding." 
} ], - "docs_url": "https://docs.sqlalchemy.org/en/20/core/engines.html#escaping-special-characters-such-as-signs-in-passwords" + "docs_url": "https://docs.sqlalchemy.org/en/20/core/engines.html#escaping-special-characters-such-as-signs-in-passwords", + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\", password, or database name \"%(database)s\" is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "The hostname \"%(hostname)s\" cannot be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ] + }, + { + "regex_name": "CONNECTION_PORT_CLOSED_REGEX", + "message_template": "Port %(port)s on hostname \"%(hostname)s\" refused the connection.", + "error_type": "CONNECTION_PORT_CLOSED_ERROR", + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [ + 1008 + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down, and can't be reached on port %(port)s.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ] + } + ] }, "time_grains": { "SECOND": true, @@ -2828,6 +3157,49 @@ "CLOUD_DATA_WAREHOUSES", "ANALYTICAL_DATABASES", "PROPRIETARY" + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\", password, or database name \"%(database)s\" is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "The hostname \"%(hostname)s\" cannot be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ] + }, + { + "regex_name": "CONNECTION_PORT_CLOSED_REGEX", + "message_template": "Port %(port)s on hostname \"%(hostname)s\" refused the connection.", + "error_type": "CONNECTION_PORT_CLOSED_ERROR", + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [ + 1008 + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down, and can't be reached on port %(port)s.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ] + } ] }, "time_grains": {}, @@ -2939,6 +3311,73 @@ "HOSTED_OPEN_SOURCE" ] } + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\" or the password is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "Unknown MySQL server host \"%(hostname)s\".", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid 
hostname", + "issue_codes": [ + 1007 + ], + "invalid_fields": [ + "host" + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down and can't be reached.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "Unable to connect to database \"%(database)s\".", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors near \"%(server_error)s\". Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } ] }, "time_grains": { @@ -3068,6 +3507,73 @@ "categories": [ "TRADITIONAL_RDBMS", "OPEN_SOURCE" + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\" or the password is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "Unknown OceanBase server host \"%(hostname)s\".", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ], + "invalid_fields": [ + "host" + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down and can't be reached.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "Unable to connect to database \"%(database)s\".", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors near \"%(server_error)s\". 
Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } ] }, "time_grains": {}, @@ -3099,7 +3605,96 @@ "sqlalchemy-ocient" ], "connection_string": "ocient://{username}:{password}@{host}:{port}/{database}", - "install_instructions": "pip install sqlalchemy-ocient" + "install_instructions": "pip install sqlalchemy-ocient", + "custom_errors": [ + { + "regex_name": "CONNECTION_INVALID_USERNAME_REGEX", + "message_template": "The username \"%(username)s\" does not exist.", + "error_type": "CONNECTION_INVALID_USERNAME_ERROR", + "category": "Authentication", + "description": "Invalid username", + "issue_codes": [ + 1012 + ] + }, + { + "regex_name": "CONNECTION_INVALID_PASSWORD_REGEX", + "message_template": "The user/password combination is not valid (Incorrect password for user).", + "error_type": "CONNECTION_INVALID_PASSWORD_ERROR", + "category": "Authentication", + "description": "Invalid password", + "issue_codes": [ + 1013 + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "Could not connect to database: \"%(database)s\"", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "Could not resolve hostname: \"%(host)s\".", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ] + }, + { + "regex_name": "CONNECTION_INVALID_PORT_ERROR", + "message_template": "Port out of range 0-65535", + "error_type": "CONNECTION_INVALID_PORT_ERROR" + }, + { + "regex_name": "INVALID_CONNECTION_STRING_REGEX", + "message_template": "Invalid Connection String: Expecting String of the form 'ocient://user:pass@host:port/database'.", + "error_type": "GENERIC_DB_ENGINE_ERROR", + "category": "General", + "description": "Database engine error", + "issue_codes": [ + 1002 + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Syntax Error: %(qualifier)s input \"%(input)s\" expecting \"%(expected)s", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + }, + { + "regex_name": "TABLE_DOES_NOT_EXIST_REGEX", + "message_template": "Table or View \"%(table)s\" does not exist.", + "error_type": "TABLE_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Table not found", + "issue_codes": [ + 1003, + 1005 + ] + }, + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "Invalid reference to column: \"%(column)s\"", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + } + ] }, "time_grains": { "SECOND": true, @@ -3446,6 +4041,124 @@ "HOSTED_OPEN_SOURCE" ] } + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_INVALID_USERNAME_REGEX", + "message_template": "The username \"%(username)s\" does not exist.", + "error_type": "CONNECTION_INVALID_USERNAME_ERROR", + "category": "Authentication", + "description": "Invalid username", + "issue_codes": [ + 1012 + ], + "invalid_fields": [ + "username" + ] + }, + { + "regex_name": "CONNECTION_INVALID_PASSWORD_REGEX", + "message_template": "The password provided for username \"%(username)s\" is incorrect.", + "error_type": "CONNECTION_INVALID_PASSWORD_ERROR", + "category": 
"Authentication", + "description": "Invalid password", + "issue_codes": [ + 1013 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "regex_name": "CONNECTION_INVALID_PASSWORD_NEEDED_REGEX", + "message_template": "Please re-enter the password.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "password" + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "The hostname \"%(hostname)s\" cannot be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ], + "invalid_fields": [ + "host" + ] + }, + { + "regex_name": "CONNECTION_PORT_CLOSED_REGEX", + "message_template": "Port %(port)s on hostname \"%(hostname)s\" refused the connection.", + "error_type": "CONNECTION_PORT_CLOSED_ERROR", + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [ + 1008 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down, and can't be reached on port %(port)s.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "Unable to connect to database \"%(database)s\".", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + }, + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "We can't seem to resolve the column \"%(column_name)s\" at line %(location)s.", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } ] }, "time_grains": { @@ -3515,6 +4228,92 @@ "connection_string": "presto://{hostname}:{port}/{database}", "is_recommended": true } + ], + "custom_errors": [ + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "We can't seem to resolve the column \"%(column_name)s\" at line %(location)s.", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + }, + { + "regex_name": "TABLE_DOES_NOT_EXIST_REGEX", + "message_template": "The table \"%(table_name)s\" does not exist. A valid table must be used to run this query.", + "error_type": "TABLE_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Table not found", + "issue_codes": [ + 1003, + 1005 + ] + }, + { + "regex_name": "SCHEMA_DOES_NOT_EXIST_REGEX", + "message_template": "The schema \"%(schema_name)s\" does not exist. 
A valid schema must be used to run this query.", + "error_type": "SCHEMA_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Schema not found", + "issue_codes": [ + 1003, + 1016 + ] + }, + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\" or the password is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "The hostname \"%(hostname)s\" cannot be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down, and can't be reached on port %(port)s.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ] + }, + { + "regex_name": "CONNECTION_PORT_CLOSED_REGEX", + "message_template": "Port %(port)s on hostname \"%(hostname)s\" refused the connection.", + "error_type": "CONNECTION_PORT_CLOSED_ERROR", + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [ + 1008 + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "message_template": "Unable to connect to catalog named \"%(catalog_name)s\".", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ] + } ] }, "time_grains": { @@ -3625,6 +4424,77 @@ } } } + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\" or the password is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "regex_name": "CONNECTION_INVALID_HOSTNAME_REGEX", + "message_template": "The hostname \"%(hostname)s\" cannot be resolved.", + "error_type": "CONNECTION_INVALID_HOSTNAME_ERROR", + "category": "Connection", + "description": "Invalid hostname", + "issue_codes": [ + 1007 + ], + "invalid_fields": [ + "host" + ] + }, + { + "regex_name": "CONNECTION_PORT_CLOSED_REGEX", + "message_template": "Port %(port)s on hostname \"%(hostname)s\" refused the connection.", + "error_type": "CONNECTION_PORT_CLOSED_ERROR", + "category": "Connection", + "description": "Port closed or refused", + "issue_codes": [ + 1008 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_HOST_DOWN_REGEX", + "message_template": "The host \"%(hostname)s\" might be down, and can't be reached on port %(port)s.", + "error_type": "CONNECTION_HOST_DOWN_ERROR", + "category": "Connection", + "description": "Host unreachable", + "issue_codes": [ + 1009 + ], + "invalid_fields": [ + "host", + "port" + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "We were unable to connect to your database named \"%(database)s\". 
Please verify your database name and try again.", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + } ] }, "time_grains": {}, @@ -3858,7 +4728,29 @@ } ], "notes": "Schema is not required in connection string. Ensure user has privileges for all databases/schemas/tables/views/warehouses.", - "docs_url": "https://docs.snowflake.com/en/user-guide/key-pair-auth.html" + "docs_url": "https://docs.snowflake.com/en/user-guide/key-pair-auth.html", + "custom_errors": [ + { + "regex_name": "OBJECT_DOES_NOT_EXIST_REGEX", + "message_template": "%(object)s does not exist in this database.", + "error_type": "OBJECT_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Object not found", + "issue_codes": [ + 1029 + ] + }, + { + "regex_name": "SYNTAX_ERROR_REGEX", + "message_template": "Please check your query for syntax errors at or near \"%(syntax_error)s\". Then, try running your query again.", + "error_type": "SYNTAX_ERROR", + "category": "Query", + "description": "SQL syntax error", + "issue_codes": [ + 1030 + ] + } + ] }, "time_grains": { "SECOND": true, @@ -4039,7 +4931,20 @@ ], "pypi_packages": [], "connection_string": "sqlite:///path/to/file.db?check_same_thread=false", - "notes": "No additional library needed. SQLite is bundled with Python." + "notes": "No additional library needed. SQLite is bundled with Python.", + "custom_errors": [ + { + "regex_name": "COLUMN_DOES_NOT_EXIST_REGEX", + "message_template": "We can't seem to resolve the column \"%(column_name)s\"", + "error_type": "COLUMN_DOES_NOT_EXIST_ERROR", + "category": "Query", + "description": "Column not found", + "issue_codes": [ + 1003, + 1004 + ] + } + ] }, "time_grains": { "SECOND": true, @@ -4183,6 +5088,36 @@ }, "docs_url": "https://docs.celerdata.com/" } + ], + "custom_errors": [ + { + "regex_name": "CONNECTION_ACCESS_DENIED_REGEX", + "message_template": "Either the username \"%(username)s\" or the password is incorrect.", + "error_type": "CONNECTION_ACCESS_DENIED_ERROR", + "category": "Authentication", + "description": "Access denied", + "issue_codes": [ + 1014, + 1015 + ], + "invalid_fields": [ + "username", + "password" + ] + }, + { + "regex_name": "CONNECTION_UNKNOWN_DATABASE_REGEX", + "message_template": "Unable to connect to database \"%(database)s\".", + "error_type": "CONNECTION_UNKNOWN_DATABASE_ERROR", + "category": "Connection", + "description": "Unknown database", + "issue_codes": [ + 1015 + ], + "invalid_fields": [ + "database" + ] + } ] }, "time_grains": { diff --git a/docs/static/img/logos/club25deagosto.svg b/docs/static/img/logos/club25deagosto.svg new file mode 100644 index 000000000000..3ad7e0da9f5c --- /dev/null +++ b/docs/static/img/logos/club25deagosto.svg @@ -0,0 +1,220 @@ + + + +CA25DEAGOSTO diff --git a/superset-frontend/packages/superset-core/src/ui/theme/types.ts b/superset-frontend/packages/superset-core/src/ui/theme/types.ts index 03c5e8d45a39..ac905393f9af 100644 --- a/superset-frontend/packages/superset-core/src/ui/theme/types.ts +++ b/superset-frontend/packages/superset-core/src/ui/theme/types.ts @@ -119,6 +119,7 @@ export interface SupersetSpecificTokens { // Brand-related brandIconMaxWidth: number; + brandAppName?: string; brandLogoAlt: string; brandLogoUrl: string; brandLogoMargin: string; diff --git a/superset-frontend/src/dashboard/containers/DashboardPage.tsx b/superset-frontend/src/dashboard/containers/DashboardPage.tsx index 
28790b0c2532..6dd2ebd52560 100644 --- a/superset-frontend/src/dashboard/containers/DashboardPage.tsx +++ b/superset-frontend/src/dashboard/containers/DashboardPage.tsx @@ -223,15 +223,28 @@ export const DashboardPage: FC = ({ idOrSlug }: PageProps) => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [readyToRender]); + // Capture original title before any effects run + const originalTitle = useMemo(() => document.title, []); + + // Update document title when dashboard title changes useEffect(() => { if (dashboard_title) { document.title = dashboard_title; } - return () => { - document.title = 'Superset'; - }; }, [dashboard_title]); + // Restore original title on unmount + useEffect( + () => () => { + document.title = + originalTitle || + theme?.brandAppName || + theme?.brandLogoAlt || + 'Superset'; + }, + [originalTitle, theme?.brandAppName, theme?.brandLogoAlt], + ); + useEffect(() => { if (typeof css === 'string') { // returning will clean up custom css diff --git a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx index f8556a19ce37..03cea2fa0a18 100644 --- a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx +++ b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx @@ -291,15 +291,28 @@ function ExploreViewContainer(props) { const theme = useTheme(); + // Capture original title before any effects run + const originalTitle = useMemo(() => document.title, []); + + // Update document title when slice name changes useEffect(() => { if (props.sliceName) { document.title = props.sliceName; } - return () => { - document.title = 'Superset'; - }; }, [props.sliceName]); + // Restore original title on unmount + useEffect( + () => () => { + document.title = + originalTitle || + theme?.brandAppName || + theme?.brandLogoAlt || + 'Superset'; + }, + [originalTitle, theme?.brandAppName, theme?.brandLogoAlt], + ); + const addHistory = useCallback( async ({ isReplace = false, title } = {}) => { const formData = props.dashboardId diff --git a/superset/config.py b/superset/config.py index 995a5268f324..f6a7b7dfdcab 100644 --- a/superset/config.py +++ b/superset/config.py @@ -902,6 +902,8 @@ class D3TimeFormat(TypedDict, total=False): THEME_DEFAULT: Theme = { "token": { # Brand + # Application name for window titles + "brandAppName": APP_NAME, "brandLogoAlt": "Apache Superset", "brandLogoUrl": APP_ICON, "brandLogoMargin": "18px 0", diff --git a/superset/templates/superset/spa.html b/superset/templates/superset/spa.html index 3597e7947079..80e06ae0b328 100644 --- a/superset/templates/superset/spa.html +++ b/superset/templates/superset/spa.html @@ -24,6 +24,8 @@ {% block title %} {% if title %} {{ title }} + {% else %} + {{ default_title | default('Superset') }} {% endif %} {% endblock %} diff --git a/superset/views/base.py b/superset/views/base.py index d4178cac7939..f8d44f0f9943 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -16,6 +16,7 @@ # under the License. 
from __future__ import annotations +import copy import functools import logging import os @@ -567,10 +568,39 @@ def get_spa_template_context( """ payload = get_spa_payload(extra_bootstrap_data) - # Extract theme data for template access - theme_data = get_theme_bootstrap_data().get("theme", {}) + # Deep copy theme data to avoid mutating cached bootstrap payload + theme_data = copy.deepcopy(payload.get("common", {}).get("theme", {})) default_theme = theme_data.get("default", {}) - theme_tokens = default_theme.get("token", {}) + dark_theme = theme_data.get("dark", {}) + + # Apply brandAppName fallback to both default and dark themes + # Priority: theme brandAppName > APP_NAME config > "Superset" default + app_name_from_config = app.config.get("APP_NAME", "Superset") + for theme_config in [default_theme, dark_theme]: + if not theme_config: + continue + # Get or create token dict + if "token" not in theme_config: + theme_config["token"] = {} + theme_tokens = theme_config["token"] + + if ( + not theme_tokens.get("brandAppName") + or theme_tokens.get("brandAppName") == "Superset" + ): + # If brandAppName not set or is default, check if APP_NAME customized + if app_name_from_config != "Superset": + # User has customized APP_NAME, use it as brandAppName + theme_tokens["brandAppName"] = app_name_from_config + + # Write the modified theme data back to payload + if "common" not in payload: + payload["common"] = {} + payload["common"]["theme"] = theme_data + + # Extract theme tokens for template access (after fallback applied) + # Use the direct reference to ensure we get the modified token dict + theme_tokens = default_theme.get("token", {}) if default_theme else {} # Determine spinner content with precedence: theme SVG > theme URL > default SVG spinner_svg = None @@ -581,6 +611,9 @@ def get_spa_template_context( # No custom URL either, use default SVG spinner_svg = get_default_spinner_svg() + # Determine default title using the (potentially updated) brandAppName + default_title = theme_tokens.get("brandAppName", "Superset") + return { "entry": entry, "bootstrap_data": json.dumps( @@ -588,6 +621,7 @@ def get_spa_template_context( ), "theme_tokens": theme_tokens, "spinner_svg": spinner_svg, + "default_title": default_title, **template_kwargs, } diff --git a/tests/unit_tests/views/test_base_theme_helpers.py b/tests/unit_tests/views/test_base_theme_helpers.py index 4f29480afdc5..3b3ac972bbe2 100644 --- a/tests/unit_tests/views/test_base_theme_helpers.py +++ b/tests/unit_tests/views/test_base_theme_helpers.py @@ -530,3 +530,305 @@ def test_ui_admin_disabled_no_config_themes(self, mock_get_config, mock_app): assert result["theme"]["enableUiThemeAdministration"] is False assert result["theme"]["default"] == {} assert result["theme"]["dark"] == {} + + +class TestBrandAppNameFallback: + """Test brandAppName fallback mechanism for APP_NAME migration (issue #34865)""" + + @patch("superset.views.base.get_spa_payload") + @patch("superset.views.base.app") + def test_brandappname_uses_theme_value_when_set(self, mock_app, mock_payload): + """Test that explicit brandAppName in theme takes precedence""" + from superset.views.base import get_spa_template_context + + # Use a plain dict for config to mirror Flask's config mapping behavior + mock_app.config = {"APP_NAME": "Fallback App Name"} + + # Mock payload with theme data that has custom brandAppName + mock_payload.return_value = { + "common": { + "theme": { + "default": { + "token": { + "brandAppName": "My Custom App", + "brandLogoAlt": "Logo Alt", + } + } + } + } 
+ } + + result = get_spa_template_context("app") + + # Should use the theme's brandAppName + assert result["default_title"] == "My Custom App" + # Theme tokens should have brandAppName + theme_tokens = result["theme_tokens"] + assert theme_tokens["brandAppName"] == "My Custom App" + + @patch("superset.views.base.get_spa_payload") + @patch("superset.views.base.app") + def test_brandappname_falls_back_to_app_name_config(self, mock_app, mock_payload): + """Test fallback to APP_NAME config when brandAppName not in theme""" + from superset.views.base import get_spa_template_context + + mock_app.config = MagicMock() + mock_app.config.get.side_effect = lambda k, d=None: { + "APP_NAME": "My Test Analytics Platform", + }.get(k, d) + + # Mock payload with default "Superset" brandAppName + mock_payload.return_value = { + "common": { + "theme": { + "default": { + "token": { + "brandAppName": "Superset", # Default value + "brandLogoAlt": "Apache Superset", + } + } + } + } + } + + result = get_spa_template_context("app") + + # Should fall back to APP_NAME config + assert result["default_title"] == "My Test Analytics Platform" + # Theme tokens should be updated with APP_NAME value + theme_tokens = result["theme_tokens"] + assert theme_tokens["brandAppName"] == "My Test Analytics Platform" + + @patch("superset.views.base.get_spa_payload") + @patch("superset.views.base.app") + def test_brandappname_uses_superset_default_when_nothing_set( + self, mock_app, mock_payload + ): + """Test fallback to 'Superset' when neither is customized""" + from superset.views.base import get_spa_template_context + + mock_app.config = MagicMock() + mock_app.config.get.side_effect = lambda k, d=None: { + "APP_NAME": "Superset", # Default value + }.get(k, d) + + # Mock payload with default "Superset" brandAppName + mock_payload.return_value = { + "common": { + "theme": { + "default": { + "token": { + "brandAppName": "Superset", # Default value + "brandLogoAlt": "Apache Superset", + } + } + } + } + } + + result = get_spa_template_context("app") + + # Should use default "Superset" + assert result["default_title"] == "Superset" + # Theme tokens should keep "Superset" + theme_tokens = result["theme_tokens"] + assert theme_tokens["brandAppName"] == "Superset" + + @patch("superset.views.base.get_spa_payload") + @patch("superset.views.base.app") + def test_brandappname_empty_string_falls_back(self, mock_app, mock_payload): + """Test that empty string brandAppName triggers fallback""" + from superset.views.base import get_spa_template_context + + mock_app.config = MagicMock() + mock_app.config.get.side_effect = lambda k, d=None: { + "APP_NAME": "Custom App", + }.get(k, d) + + # Mock payload with empty brandAppName + mock_payload.return_value = { + "common": { + "theme": { + "default": { + "token": { + "brandAppName": "", # Empty string + "brandLogoAlt": "Logo", + } + } + } + } + } + + result = get_spa_template_context("app") + + # Should fall back to APP_NAME + assert result["default_title"] == "Custom App" + theme_tokens = result["theme_tokens"] + assert theme_tokens["brandAppName"] == "Custom App" + + @patch("superset.views.base.get_spa_payload") + @patch("superset.views.base.app") + def test_brandappname_none_falls_back(self, mock_app, mock_payload): + """Test that missing brandAppName triggers fallback""" + from superset.views.base import get_spa_template_context + + mock_app.config = MagicMock() + mock_app.config.get.side_effect = lambda k, d=None: { + "APP_NAME": "Analytics Dashboard", + }.get(k, d) + + # Mock payload without 
+        mock_payload.return_value = {
+            "common": {"theme": {"default": {"token": {"brandLogoAlt": "Logo"}}}}
+        }
+
+        result = get_spa_template_context("app")
+
+        # Should fall back to APP_NAME
+        assert result["default_title"] == "Analytics Dashboard"
+        theme_tokens = result["theme_tokens"]
+        assert theme_tokens["brandAppName"] == "Analytics Dashboard"
+
+    @patch("superset.views.base.get_spa_payload")
+    @patch("superset.views.base.app")
+    def test_brandappname_updates_both_default_and_dark_themes(
+        self, mock_app, mock_payload
+    ):
+        """Test that brandAppName fallback applies to both default and dark themes"""
+        from superset.views.base import get_spa_template_context
+
+        mock_app.config = MagicMock()
+        mock_app.config.get.side_effect = lambda k, d=None: {
+            "APP_NAME": "Multi Theme App",
+        }.get(k, d)
+
+        # Mock payload where neither theme has a customized brandAppName
+        mock_payload.return_value = {
+            "common": {
+                "theme": {
+                    "default": {
+                        "token": {
+                            "brandAppName": "Superset",  # Default value
+                            "colorPrimary": "#111",
+                        }
+                    },
+                    "dark": {
+                        "token": {
+                            # Missing brandAppName
+                            "colorPrimary": "#222",
+                        }
+                    },
+                }
+            }
+        }
+
+        result = get_spa_template_context("app")
+
+        # Should update both themes; the returned tokens reflect the default theme
+        assert result["default_title"] == "Multi Theme App"
+        # Verify default theme was updated
+        theme_tokens = result["theme_tokens"]
+        assert theme_tokens["brandAppName"] == "Multi Theme App"
+        assert theme_tokens["colorPrimary"] == "#111"  # Preserved
+
+    @patch("superset.views.base.get_spa_payload")
+    @patch("superset.views.base.app")
+    def test_brandappname_does_not_mutate_cached_payload(self, mock_app, mock_payload):
+        """Test that brandAppName fallback doesn't mutate the cached payload"""
+        from superset.views.base import get_spa_template_context
+
+        mock_app.config = MagicMock()
+        mock_app.config.get.side_effect = lambda k, d=None: {
+            "APP_NAME": "Test App",
+        }.get(k, d)
+
+        # Create a payload that simulates cached data
+        original_theme_data = {
+            "default": {
+                "token": {
+                    "brandAppName": "Superset",
+                    "colorPrimary": "#333",
+                }
+            }
+        }
+
+        mock_payload.return_value = {"common": {"theme": original_theme_data}}
+
+        # Call get_spa_template_context
+        result = get_spa_template_context("app")
+
+        # Verify the function result has the updated brandAppName
+        assert result["default_title"] == "Test App"
+        theme_tokens = result["theme_tokens"]
+        assert theme_tokens["brandAppName"] == "Test App"
+
+        # Verify the cached theme data wasn't mutated: the function deep copies
+        # the payload's theme before applying the fallback
+        assert original_theme_data["default"]["token"]["brandAppName"] == "Superset"
+        assert original_theme_data["default"]["token"]["colorPrimary"] == "#333"
+
+    @patch("superset.views.base.get_spa_payload")
+    @patch("superset.views.base.app")
+    def test_brandappname_handles_empty_theme_config(self, mock_app, mock_payload):
+        """Test that empty theme configs are skipped gracefully"""
+        from superset.views.base import get_spa_template_context
+
+        mock_app.config = {"APP_NAME": "Test App"}
+
+        # Mock payload with empty dark theme
+        mock_payload.return_value = {
+            "common": {
+                "theme": {
+                    "default": {"token": {"brandAppName": "Superset"}},
+                    "dark": {},  # Empty theme config
+                }
+            }
+        }
+
+        result = get_spa_template_context("app")
+
+        # Should handle empty theme gracefully and still update default
+        assert result["default_title"] == "Test App"
+
+    @patch("superset.views.base.get_spa_payload")
+    @patch("superset.views.base.app")
+    def test_brandappname_creates_token_dict_when_missing(self, mock_app, mock_payload):
+        """Test that token dict is created when missing from theme config"""
+        from superset.views.base import get_spa_template_context
+
+        mock_app.config = {"APP_NAME": "Token Test App"}
+
+        # Mock payload with theme missing token dict
+        mock_payload.return_value = {
+            "common": {
+                "theme": {
+                    "default": {"algorithm": "default"},  # No token dict
+                    "dark": {"algorithm": "dark"},  # No token dict
+                }
+            }
+        }
+
+        result = get_spa_template_context("app")
+
+        # Should create token dict and set brandAppName
+        assert result["default_title"] == "Token Test App"
+        assert result["theme_tokens"]["brandAppName"] == "Token Test App"
+
+    @patch("superset.views.base.get_spa_payload")
+    @patch("superset.views.base.app")
+    def test_brandappname_handles_missing_common_in_payload(
+        self, mock_app, mock_payload
+    ):
+        """Test handling when common dict is missing from payload"""
+        from superset.views.base import get_spa_template_context
+
+        mock_app.config = {"APP_NAME": "Superset"}
+
+        # Mock payload without common dict
+        mock_payload.return_value = {}
+
+        result = get_spa_template_context("app")
+
+        # Should handle gracefully and use default title
+        assert result["default_title"] == "Superset"