7 changes: 4 additions & 3 deletions .circleci/test_migration.sh
@@ -24,13 +24,14 @@ TEST_DIR="$TMP_DIR/$EXAMPLE_NAME"

echo "Running migration test for '$EXAMPLE_NAME' in '$TEST_DIR' for example project '$EXAMPLE_DIR' using options '$SQLMESH_OPTS'"

# Copy the example project from the *current* checkout so it's stable across old/new SQLMesh versions
cp -r "$EXAMPLE_DIR" "$TEST_DIR"

git checkout $LAST_TAG

# Install dependencies from the previous release.
make install-dev

cp -r $EXAMPLE_DIR $TEST_DIR

# this is only needed temporarily until the released tag for $LAST_TAG includes this config
if [ "$EXAMPLE_NAME" == "sushi_dbt" ]; then
echo 'migration_test_config = sqlmesh_config(Path(__file__).parent, dbt_target_name="duckdb")' >> $TEST_DIR/config.py
@@ -53,4 +54,4 @@ make install-dev
 pushd $TEST_DIR
 sqlmesh $SQLMESH_OPTS migrate
 sqlmesh $SQLMESH_OPTS diff prod
-popd
\ No newline at end of file
+popd
2 changes: 1 addition & 1 deletion examples/sushi/models/customers.sql
@@ -42,4 +42,4 @@ LEFT JOIN (
   ON o.customer_id = m.customer_id
 LEFT JOIN raw.demographics AS d
   ON o.customer_id = d.customer_id
-WHERE sushi.orders.customer_id > 0
+WHERE o.customer_id > 0
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -24,7 +24,7 @@ dependencies = [
"requests",
"rich[jupyter]",
"ruamel.yaml",
"sqlglot[rs]~=27.28.0",
"sqlglot[rs]~=28.6.0",
"tenacity",
"time-machine",
"json-stream"
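Context for the rest of this diff: the `sqlglot[rs]` pin moves from `~=27.28.0` to `~=28.6.0`, and everything else here adapts to that upgrade — the expression-arg renames (`"with"` → `"with_"`, `"from"` → `"from_"`), the set-valued `exclude` convention, and the regenerated BigQuery/ClickHouse SQL expectations below. A minimal, purely illustrative sanity check that the resolved version matches the pin:

```python
# Illustrative only: confirm the installed sqlglot satisfies the ~=28.6.0 pin.
import sqlglot

assert sqlglot.__version__.startswith("28."), sqlglot.__version__
```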
6 changes: 3 additions & 3 deletions sqlmesh/core/config/connection.py
@@ -2334,7 +2334,7 @@ def init(cursor: t.Any) -> None:
     for tpe in subclasses(
         __name__,
         ConnectionConfig,
-        exclude=(ConnectionConfig, BaseDuckDBConnectionConfig),
+        exclude={ConnectionConfig, BaseDuckDBConnectionConfig},
     )
 }

@@ -2343,7 +2343,7 @@ def init(cursor: t.Any) -> None:
     for tpe in subclasses(
         __name__,
         ConnectionConfig,
-        exclude=(ConnectionConfig, BaseDuckDBConnectionConfig),
+        exclude={ConnectionConfig, BaseDuckDBConnectionConfig},
     )
 }

@@ -2355,7 +2355,7 @@ def init(cursor: t.Any) -> None:
     for tpe in subclasses(
         __name__,
         ConnectionConfig,
-        exclude=(ConnectionConfig, BaseDuckDBConnectionConfig),
+        exclude={ConnectionConfig, BaseDuckDBConnectionConfig},
     )
 }

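These call sites now pass `exclude` as a set by keyword instead of a tuple. For illustration, a hypothetical stand-in for the `subclasses` helper (not SQLMesh's actual implementation) showing why a set is the natural type here:

```python
import typing as t


def iter_subclasses(base: type) -> t.Iterator[type]:
    # Walk the full subclass tree depth-first.
    for sub in base.__subclasses__():
        yield sub
        yield from iter_subclasses(sub)


def concrete_subclasses(base: type, exclude: t.Set[type]) -> t.List[type]:
    # A set makes the exclusion an O(1) membership test; none of the call
    # sites above relied on tuple ordering anyway.
    return [tpe for tpe in iter_subclasses(base) if tpe not in exclude]
```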
2 changes: 1 addition & 1 deletion sqlmesh/core/config/scheduler.py
@@ -146,7 +146,7 @@ def get_default_catalog_per_gateway(self, context: GenericContext) -> t.Dict[str

 SCHEDULER_CONFIG_TO_TYPE = {
     tpe.all_field_infos()["type_"].default: tpe
-    for tpe in subclasses(__name__, BaseConfig, exclude=(BaseConfig,))
+    for tpe in subclasses(__name__, BaseConfig, exclude={BaseConfig})
 }


4 changes: 2 additions & 2 deletions sqlmesh/core/engine_adapter/base.py
@@ -2861,7 +2861,7 @@ def _order_projections_and_filter(
             return query
 
         query = t.cast(exp.Query, query.copy())
-        with_ = query.args.pop("with", None)
+        with_ = query.args.pop("with_", None)
 
         select_exprs: t.List[exp.Expression] = [
             exp.column(c, quoted=True) for c in target_columns_to_types
@@ -2877,7 +2877,7 @@ def _order_projections_and_filter(
             query = query.where(where, copy=False)
 
         if with_:
-            query.set("with", with_)
+            query.set("with_", with_)
 
         return query

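The `.args` updates in this file (and in `metric/rewriter.py`, `model/definition.py`, and `test/definition.py` below) all track sqlglot 28's renaming of expression args that collide with Python keywords: `"with"` → `"with_"` and `"from"` → `"from_"`. A minimal sketch of the detach/re-attach pattern used in `_order_projections_and_filter`, assuming sqlglot >= 28:

```python
import sqlglot

query = sqlglot.parse_one("WITH t AS (SELECT 1 AS x) SELECT x FROM t")

# Detach the WITH clause before rewriting projections; under sqlglot 28 the
# arg key is "with_" (it was "with" under 27.x).
with_ = query.args.pop("with_", None)

# ... rewrite projections / add filters; the FROM clause now sits at "from_" ...
assert query.args.get("from_") is not None

if with_:
    # Re-attach the CTEs to the rewritten query.
    query.set("with_", with_)
```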
2 changes: 1 addition & 1 deletion sqlmesh/core/linter/rules/builtin.py
@@ -318,4 +318,4 @@ def check_model(self, model: Model) -> t.Optional[RuleViolation]:
         return None
 
 
-BUILTIN_RULES = RuleSet(subclasses(__name__, Rule, (Rule,)))
+BUILTIN_RULES = RuleSet(subclasses(__name__, Rule, exclude={Rule}))
2 changes: 1 addition & 1 deletion sqlmesh/core/loader.py
@@ -840,7 +840,7 @@ def _load_linting_rules(self) -> RuleSet:
             if os.path.getsize(path):
                 self._track_file(path)
                 module = import_python_file(path, self.config_path)
-                module_rules = subclasses(module.__name__, Rule, (Rule,))
+                module_rules = subclasses(module.__name__, Rule, exclude={Rule})
                 for user_rule in module_rules:
                     user_rules[user_rule.name] = user_rule

2 changes: 1 addition & 1 deletion sqlmesh/core/metric/rewriter.py
@@ -57,7 +57,7 @@ def _build_sources(self, projections: t.List[exp.Expression]) -> SourceAggsAndJo
         return sources
 
     def _expand(self, select: exp.Select) -> None:
-        base = select.args["from"].this.find(exp.Table)
+        base = select.args["from_"].this.find(exp.Table)
         base_alias = base.alias_or_name
         base_name = exp.table_name(base)

2 changes: 1 addition & 1 deletion sqlmesh/core/model/definition.py
@@ -753,7 +753,7 @@ def ctas_query(self, **render_kwarg: t.Any) -> exp.Query:
         query = self.render_query_or_raise(**render_kwarg).limit(0)
 
         for select_or_set_op in query.find_all(exp.Select, exp.SetOperation):
-            if isinstance(select_or_set_op, exp.Select) and select_or_set_op.args.get("from"):
+            if isinstance(select_or_set_op, exp.Select) and select_or_set_op.args.get("from_"):
                 select_or_set_op.where(exp.false(), copy=False)
 
         if self.managed_columns:
4 changes: 2 additions & 2 deletions sqlmesh/core/test/definition.py
@@ -711,7 +711,7 @@ def runTest(self) -> None:
         query = self._render_model_query()
         sql = query.sql(self._test_adapter_dialect, pretty=self.engine_adapter._pretty_sql)
 
-        with_clause = query.args.get("with")
+        with_clause = query.args.get("with_")
 
         if with_clause:
             self.test_ctes(
@@ -905,7 +905,7 @@ def generate_test(
     if isinstance(model, SqlModel):
         assert isinstance(test, SqlModelTest)
         model_query = test._render_model_query()
-        with_clause = model_query.args.get("with")
+        with_clause = model_query.args.get("with_")
 
         if with_clause and include_ctes:
             ctes = {}
4 changes: 2 additions & 2 deletions tests/core/engine_adapter/test_bigquery.py
@@ -1245,7 +1245,7 @@ def test_sync_grants_config(make_mocked_engine_adapter: t.Callable, mocker: Mock
     executed_sql = executed_query.sql(dialect="bigquery")
     expected_sql = (
         "SELECT privilege_type, grantee FROM `project`.`region-us-central1`.`INFORMATION_SCHEMA.OBJECT_PRIVILEGES` AS OBJECT_PRIVILEGES "
-        "WHERE object_schema = 'dataset' AND object_name = 'test_table' AND SPLIT(grantee, ':')[OFFSET(1)] <> session_user()"
+        "WHERE object_schema = 'dataset' AND object_name = 'test_table' AND SPLIT(grantee, ':')[OFFSET(1)] <> SESSION_USER()"
     )
     assert executed_sql == expected_sql

@@ -1306,7 +1306,7 @@ def test_sync_grants_config_with_overlaps(
     executed_sql = executed_query.sql(dialect="bigquery")
     expected_sql = (
         "SELECT privilege_type, grantee FROM `project`.`region-us-central1`.`INFORMATION_SCHEMA.OBJECT_PRIVILEGES` AS OBJECT_PRIVILEGES "
-        "WHERE object_schema = 'dataset' AND object_name = 'test_table' AND SPLIT(grantee, ':')[OFFSET(1)] <> session_user()"
+        "WHERE object_schema = 'dataset' AND object_name = 'test_table' AND SPLIT(grantee, ':')[OFFSET(1)] <> SESSION_USER()"
     )
     assert executed_sql == expected_sql

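The only change in these two tests is casing: the generated grant guard now renders `SESSION_USER()` instead of `session_user()`, i.e. sqlglot 28 canonicalizes the function name when generating BigQuery SQL. A sketch of the round-trip; the expected output is an assumption drawn from the updated test strings:

```python
import sqlglot

# Round-trip through the BigQuery dialect; per the expectations above,
# sqlglot ~=28.6 uppercases the function name on generation.
print(sqlglot.transpile("SELECT session_user()", read="bigquery", write="bigquery")[0])
# e.g. SELECT SESSION_USER()
```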
24 changes: 12 additions & 12 deletions tests/core/engine_adapter/test_clickhouse.py
@@ -327,16 +327,16 @@ def build_properties_sql(storage_format="", order_by="", primary_key="", propert

 assert (
     build_properties_sql(
-        order_by="ORDER_BY = 'timestamp with fill to toStartOfDay(toDateTime64(\\'2024-07-11\\', 3)) step toIntervalDay(1) interpolate(price as price)',"
+        order_by="ORDER_BY = 'timestamp with fill to dateTrunc(\\'DAY\\', toDateTime64(\\'2024-07-11\\', 3)) step toIntervalDay(1) interpolate(price as price)',"
     )
-    == "ENGINE=MergeTree ORDER BY (timestamp WITH FILL TO toStartOfDay(toDateTime64('2024-07-11', 3)) STEP toIntervalDay(1) INTERPOLATE (price AS price))"
+    == "ENGINE=MergeTree ORDER BY (timestamp WITH FILL TO dateTrunc('DAY', toDateTime64('2024-07-11', 3)) STEP toIntervalDay(1) INTERPOLATE (price AS price))"
 )
 
 assert (
     build_properties_sql(
-        order_by="ORDER_BY = (\"a\", 'timestamp with fill to toStartOfDay(toDateTime64(\\'2024-07-11\\', 3)) step toIntervalDay(1) interpolate(price as price)'),"
+        order_by="ORDER_BY = (\"a\", 'timestamp with fill to dateTrunc(\\'DAY\\', toDateTime64(\\'2024-07-11\\', 3)) step toIntervalDay(1) interpolate(price as price)'),"
     )
-    == "ENGINE=MergeTree ORDER BY (\"a\", timestamp WITH FILL TO toStartOfDay(toDateTime64('2024-07-11', 3)) STEP toIntervalDay(1) INTERPOLATE (price AS price))"
+    == "ENGINE=MergeTree ORDER BY (\"a\", timestamp WITH FILL TO dateTrunc('DAY', toDateTime64('2024-07-11', 3)) STEP toIntervalDay(1) INTERPOLATE (price AS price))"
 )
 
 assert (
@@ -368,7 +368,7 @@ def test_partitioned_by_expr(make_mocked_engine_adapter: t.Callable):

     assert (
         model.partitioned_by[0].sql("clickhouse")
-        == """toMonday(CAST("ds" AS DateTime64(9, 'UTC')))"""
+        == """dateTrunc('WEEK', CAST("ds" AS DateTime64(9, 'UTC')))"""
     )
 
     # user specifies without time column, unknown time column type
@@ -393,7 +393,7 @@ def test_partitioned_by_expr(make_mocked_engine_adapter: t.Callable):
     )
 
     assert [p.sql("clickhouse") for p in model.partitioned_by] == [
-        """toMonday(CAST("ds" AS DateTime64(9, 'UTC')))""",
+        """dateTrunc('WEEK', CAST("ds" AS DateTime64(9, 'UTC')))""",
         '"x"',
     ]

@@ -417,7 +417,7 @@ def test_partitioned_by_expr(make_mocked_engine_adapter: t.Callable):
         )
     )
 
-    assert model.partitioned_by[0].sql("clickhouse") == 'toMonday("ds")'
+    assert model.partitioned_by[0].sql("clickhouse") == """dateTrunc('WEEK', "ds")"""

     # user doesn't specify, non-conformable time column type
     model = load_sql_based_model(
@@ -441,7 +441,7 @@ def test_partitioned_by_expr(make_mocked_engine_adapter: t.Callable):

     assert (
         model.partitioned_by[0].sql("clickhouse")
-        == """CAST(toMonday(CAST("ds" AS DateTime64(9, 'UTC'))) AS String)"""
+        == """CAST(dateTrunc('WEEK', CAST("ds" AS DateTime64(9, 'UTC'))) AS String)"""
     )
 
     # user specifies partitioned_by with time column
@@ -993,7 +993,7 @@ def test_insert_overwrite_by_condition_replace_partitioned(
     temp_table_mock.return_value = make_temp_table_name(table_name, "abcd")
 
     fetchone_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchone")
-    fetchone_mock.return_value = "toMonday(ds)"
+    fetchone_mock.return_value = "dateTrunc('WEEK', ds)"
 
     insert_table_name = make_temp_table_name("new_records", "abcd")
     existing_table_name = make_temp_table_name("existing_records", "abcd")
@@ -1069,7 +1069,7 @@ def test_insert_overwrite_by_condition_where_partitioned(
     temp_table_mock.return_value = make_temp_table_name(table_name, "abcd")
 
     fetchone_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchone")
-    fetchone_mock.return_value = "toMonday(ds)"
+    fetchone_mock.return_value = "dateTrunc('WEEK', ds)"
 
     fetchall_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchall")
     fetchall_mock.side_effect = [
@@ -1175,7 +1175,7 @@ def test_insert_overwrite_by_condition_by_key_partitioned(
     temp_table_mock.return_value = make_temp_table_name(table_name, "abcd")
 
     fetchone_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchone")
-    fetchone_mock.side_effect = ["toMonday(ds)", "toMonday(ds)"]
+    fetchone_mock.side_effect = ["dateTrunc('WEEK', ds)", "dateTrunc('WEEK', ds)"]
 
     fetchall_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchall")
     fetchall_mock.side_effect = [
@@ -1240,7 +1240,7 @@ def test_insert_overwrite_by_condition_inc_by_partition(
     temp_table_mock.return_value = make_temp_table_name(table_name, "abcd")
 
     fetchone_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchone")
-    fetchone_mock.return_value = "toMonday(ds)"
+    fetchone_mock.return_value = "dateTrunc('WEEK', ds)"
 
     fetchall_mock = mocker.patch("sqlmesh.core.engine_adapter.ClickhouseEngineAdapter.fetchall")
     fetchall_mock.return_value = [("1",), ("2",), ("4",)]
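Every ClickHouse expectation change in this file is the same behavior shift: date truncation is now generated in the generic `dateTrunc('UNIT', ...)` form rather than the unit-specific helpers (`toMonday`, `toStartOfDay`) that sqlglot 27.x produced. A sketch of the transpilation; the outputs in the comments are assumptions based on the updated assertions:

```python
import sqlglot

for unit in ("week", "day"):
    sql = sqlglot.transpile(f"SELECT DATE_TRUNC('{unit}', ds)", write="clickhouse")[0]
    print(sql)

# Expected under sqlglot ~=28.6 (per the tests above):
#   SELECT dateTrunc('WEEK', ds)
#   SELECT dateTrunc('DAY', ds)
```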