docs/docusaurus.config.ts (2 changes: 1 addition & 1 deletion)
@@ -134,7 +134,7 @@ if (!versionsConfig.developer_portal.disabled && !versionsConfig.developer_porta
       {
         type: 'doc',
         docsPluginId: 'developer_portal',
-        docId: 'extensions/architectural-principles',
+        docId: 'extensions/overview',
         label: 'Extensions',
       },
       {
docs/src/data/databases.json (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 {
-  "generated": "2026-01-19T22:38:23.768Z",
+  "generated": "2026-01-21T21:46:41.044Z",
   "statistics": {
     "totalDatabases": 67,
     "withDocumentation": 67,
docs/versions-config.json (2 changes: 1 addition & 1 deletion)
@@ -22,7 +22,7 @@
   },
   "developer_portal": {
     "disabled": false,
-    "hideFromNav": true,
+    "hideFromNav": false,
     "lastVersion": "current",
     "includeCurrentVersion": true,
     "onlyIncludeVersions": [
@@ -159,6 +159,13 @@ export interface SupersetSpecificTokens {
   echartsOptionsOverridesByChartType?: {
     [chartType: string]: any;
   };
+
+  // Editor-related
+  /**
+   * Background color for code editor text selection.
+   * Defaults to colorPrimaryBgHover if not specified.
+   */
+  colorEditorSelection?: string;
 }

 /**
@@ -292,7 +292,8 @@ export function AsyncAceEditor(
   }
   /* Adjust selection color */
   .ace_editor .ace_selection {
-    background-color: ${token.colorPrimaryBgHover} !important;
+    background-color: ${token.colorEditorSelection ??
+      token.colorPrimaryBgHover} !important;
   }

   /* Improve active line highlighting */
superset/commands/dashboard/export_example.py (4 changes: 4 additions & 0 deletions)
@@ -175,6 +175,10 @@ def export_dataset_yaml(
         "schema": None,  # Don't export - use target database's default schema
         # Preserve SQL for virtual datasets, None for physical (data is in parquet)
         "sql": dataset.sql if is_preserved_virtual else None,
+        # Track source database engine for SQL transpilation during import
+        "source_db_engine": (
+            dataset.database.db_engine_spec.engine if is_preserved_virtual else None
+        ),
         "params": None,  # Don't export - contains stale import metadata
         "template_params": dataset.template_params,
         "filter_select_enabled": dataset.filter_select_enabled,
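
For context, a hedged sketch of what the exported dataset entry could end up looking like for a preserved virtual dataset. Only the keys come from the code above; the SQL and values are illustrative:

```python
# Illustrative export payload for a virtual dataset (values are made up).
{
    "schema": None,  # resolved to the target database's default schema on import
    "sql": "SELECT * FROM sales WHERE amount > 0",
    "source_db_engine": "postgresql",  # engine of the database the dataset was exported from
    "params": None,  # intentionally dropped, as noted above
    "template_params": None,
    "filter_select_enabled": False,
}
```
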
superset/commands/importers/v1/examples.py (58 changes: 58 additions & 0 deletions)
@@ -14,11 +14,13 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+import logging
 from typing import Any, Optional

 from marshmallow import Schema
 from sqlalchemy.exc import MultipleResultsFound

+from superset import db
 from superset.charts.schemas import ImportV1ChartSchema
 from superset.commands.chart.importers.v1 import ImportChartsCommand
 from superset.commands.chart.importers.v1.utils import import_chart
@@ -41,9 +43,62 @@
 from superset.dashboards.schemas import ImportV1DashboardSchema
 from superset.databases.schemas import ImportV1DatabaseSchema
 from superset.datasets.schemas import ImportV1DatasetSchema
+from superset.exceptions import QueryClauseValidationException
+from superset.models.core import Database
+from superset.sql.parse import transpile_to_dialect
 from superset.utils.core import get_example_default_schema
 from superset.utils.decorators import transaction

+logger = logging.getLogger(__name__)
+
+
+def transpile_virtual_dataset_sql(config: dict[str, Any], database_id: int) -> None:
+    """
+    Transpile virtual dataset SQL to the target database dialect.
+
+    This ensures that virtual datasets exported from one database type
+    (e.g., PostgreSQL) can be loaded into a different database type
+    (e.g., MySQL, DuckDB, SQLite).
+
+    Args:
+        config: Dataset configuration dict (modified in place)
+        database_id: ID of the target database
+    """
+    sql = config.get("sql")
+    if not sql:
+        return
+
+    database = db.session.query(Database).get(database_id)
+    if not database:
+        logger.warning("Database %s not found, skipping SQL transpilation", database_id)
+        return
+
+    target_engine = database.db_engine_spec.engine
+    source_engine = config.get("source_db_engine")
+    if target_engine == source_engine:
+        logger.info("Source and target dialects are identical, skipping transpilation")
+        return
+
+    try:
+        transpiled_sql = transpile_to_dialect(sql, target_engine, source_engine)
+        if transpiled_sql != sql:
+            logger.info(
+                "Transpiled virtual dataset SQL for '%s' from %s to %s dialect",
+                config.get("table_name", "unknown"),
+                source_engine or "generic",
+                target_engine,
+            )
+            config["sql"] = transpiled_sql
+    except QueryClauseValidationException as ex:
+        logger.warning(
+            "Could not transpile SQL for dataset '%s' from %s to %s: %s. "
+            "Using original SQL which may not be compatible.",
+            config.get("table_name", "unknown"),
+            source_engine or "generic",
+            target_engine,
+            ex,
+        )
+

 class ImportExamplesCommand(ImportModelsCommand):
     """Import examples"""
@@ -119,6 +174,9 @@ def _import(  # pylint: disable=too-many-locals, too-many-branches  # noqa: C901
             if config["schema"] is None:
                 config["schema"] = get_example_default_schema()

+            # transpile virtual dataset SQL to target database dialect
+            transpile_virtual_dataset_sql(config, config["database_id"])
+
             try:
                 dataset = import_dataset(
                     config,
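
A rough usage sketch of the new helper (the config dict and database_id=1 are hypothetical; in the real flow the config comes from the imported YAML):

```python
# Sketch only: how the import path rewrites a virtual dataset config in place.
config = {
    "table_name": "recent_signups",
    "sql": "SELECT id, email FROM users WHERE created_at::date = CURRENT_DATE",
    "source_db_engine": "postgresql",
    "database_id": 1,  # hypothetical id of the target example database
}
transpile_virtual_dataset_sql(config, config["database_id"])
# config["sql"] now holds the query rendered in the target database's dialect,
# or the original text if transpilation was skipped or failed.
```
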
superset/config.py (7 changes: 7 additions & 0 deletions)
@@ -929,6 +929,8 @@ class D3TimeFormat(TypedDict, total=False):
         "fontWeightNormal": "400",
         "fontWeightLight": "300",
         "fontWeightStrong": "500",
+        # Editor selection color (for SQL Lab text highlighting)
+        "colorEditorSelection": "#fff5cf",
     },
     "algorithm": "default",
 }
@@ -938,6 +940,11 @@
 # Set to None to disable dark mode
 THEME_DARK: Optional[Theme] = {
     **THEME_DEFAULT,
+    "token": {
+        **THEME_DEFAULT["token"],
+        # Darker selection color for dark mode
+        "colorEditorSelection": "#5c4d1a",
+    },
     "algorithm": "dark",
 }
Expand Down
superset/dataframe.py (7 changes: 6 additions & 1 deletion)
@@ -41,6 +41,9 @@ def df_to_records(dframe: pd.DataFrame) -> list[dict[str, Any]]:
     """
     Convert a DataFrame to a set of records.

+    NaN values are converted to None for JSON compatibility.
+    This handles division by zero and other operations that produce NaN.
+
     :param dframe: the DataFrame to convert
     :returns: a list of dictionaries reflecting each single row of the DataFrame
     """
@@ -52,6 +55,8 @@

     for record in records:
         for key in record:
-            record[key] = _convert_big_integers(record[key])
+            record[key] = (
+                None if pd.isna(record[key]) else _convert_big_integers(record[key])
+            )

     return records
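
A quick sanity check of the new behavior (a hedged sketch using a toy DataFrame rather than a real query result):

```python
import pandas as pd

from superset.dataframe import df_to_records

df = pd.DataFrame({"ratio": [1.0, float("nan")]})
records = df_to_records(df)
# Expected to be roughly [{"ratio": 1.0}, {"ratio": None}]: NaN is mapped to
# None, so the records serialize cleanly to JSON.
```
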
superset/datasets/schemas.py (2 changes: 2 additions & 0 deletions)
@@ -322,6 +322,8 @@ def fix_extra(self, data: dict[str, Any], **kwargs: Any) -> dict[str, Any]:
     schema = fields.String(allow_none=True)
     catalog = fields.String(allow_none=True)
     sql = fields.String(allow_none=True)
+    # Source database engine for SQL transpilation (virtual datasets only)
+    source_db_engine = fields.String(allow_none=True, load_default=None)
     params = fields.Dict(allow_none=True)
     template_params = fields.Dict(allow_none=True)
     filter_select_enabled = fields.Boolean()
superset/sql/parse.py (21 changes: 18 additions & 3 deletions)
@@ -1522,9 +1522,21 @@ def sanitize_clause(clause: str, engine: str) -> str:
         raise QueryClauseValidationException(f"Invalid SQL clause: {clause}") from ex


-def transpile_to_dialect(sql: str, target_engine: str) -> str:
+def transpile_to_dialect(
+    sql: str,
+    target_engine: str,
+    source_engine: str | None = None,
+) -> str:
     """
-    Transpile SQL from "generic SQL" to the target database dialect using SQLGlot.
+    Transpile SQL from one database dialect to another using SQLGlot.
+
+    Args:
+        sql: The SQL query to transpile
+        target_engine: The target database engine (e.g., "mysql", "postgresql")
+        source_engine: The source database engine. If None, uses generic SQL dialect.
+
+    Returns:
+        The transpiled SQL string

     If the target engine is not in SQLGLOT_DIALECTS, returns the SQL as-is.
     """
@@ -1534,8 +1546,11 @@ def transpile_to_dialect(sql: str, target_engine: str) -> str:
     if target_dialect is None:
         return sql

+    # Get source dialect (default to generic if not specified)
+    source_dialect = SQLGLOT_DIALECTS.get(source_engine) if source_engine else Dialect
+
     try:
-        parsed = sqlglot.parse_one(sql, dialect=Dialect)
+        parsed = sqlglot.parse_one(sql, dialect=source_dialect)
         return Dialect.get_or_raise(target_dialect).generate(
             parsed,
             copy=True,
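
A hedged example of calling the extended function (engine names follow the docstring above; the exact SQL that SQLGlot emits can vary by version):

```python
from superset.sql.parse import transpile_to_dialect

pg_sql = "SELECT id::text AS id_str FROM users LIMIT 10"
mysql_sql = transpile_to_dialect(pg_sql, "mysql", source_engine="postgresql")
# Something like: SELECT CAST(id AS CHAR) AS id_str FROM users LIMIT 10
```
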