Commit af1b4f1: "feedback"
1 parent: b477bcd
2 files changed (+14, -18 lines)

sqlmesh/dbt/model.py

Lines changed: 9 additions & 16 deletions
@@ -31,6 +31,7 @@
     OnAdditiveChange,
     on_destructive_change_validator,
     on_additive_change_validator,
+    TimeColumn,
 )
 from sqlmesh.dbt.basemodel import BaseModelConfig, Materialization, SnapshotStrategy
 from sqlmesh.dbt.common import SqlStr, sql_str_validator
@@ -85,7 +86,7 @@ class ModelConfig(BaseModelConfig):
 
     # sqlmesh fields
     sql: SqlStr = SqlStr("")
-    time_column: t.Optional[str] = None
+    time_column: t.Optional[TimeColumn] = None
     cron: t.Optional[str] = None
     interval_unit: t.Optional[str] = None
     batch_concurrency: t.Optional[int] = None
@@ -152,6 +153,7 @@ class ModelConfig(BaseModelConfig):
     _sql_validator = sql_str_validator
     _on_destructive_change_validator = on_destructive_change_validator
     _on_additive_change_validator = on_additive_change_validator
+    _time_column_validator = TimeColumn.validator()
 
     @field_validator(
         "unique_key",
@@ -243,17 +245,6 @@ def snapshot_strategy(self) -> t.Optional[SnapshotStrategy]:
     def table_schema(self) -> str:
         return self.target_schema or super().table_schema
 
-    def _get_overlapping_field_value(
-        self, context: DbtContext, dbt_field_name: str, sqlmesh_field_name: str
-    ) -> t.Optional[t.Any]:
-        dbt_field = self._get_field_value(dbt_field_name)
-        sqlmesh_field = getattr(self, sqlmesh_field_name, None)
-        if dbt_field is not None and sqlmesh_field is not None:
-            get_console().log_warning(
-                f"Both '{dbt_field_name}' and '{sqlmesh_field_name}' are set for model '{self.canonical_name(context)}'. '{sqlmesh_field_name}' will be used."
-            )
-        return sqlmesh_field if sqlmesh_field is not None else dbt_field
-
     def model_kind(self, context: DbtContext) -> ModelKind:
         """
         Get the sqlmesh ModelKind
@@ -342,16 +333,18 @@ def model_kind(self, context: DbtContext) -> ModelKind:
                 f"Supported strategies include {collection_to_str(INCREMENTAL_BY_TIME_RANGE_STRATEGIES)}."
             )
 
-        if self.time_column and strategy not in {"incremental_by_time_range", "microbatch"}:
+        if self.time_column and strategy != "incremental_by_time_range":
             get_console().log_warning(
                 f"Using `time_column` on a model with incremental_strategy '{strategy}' has been deprecated. "
                 f"Please use `incremental_by_time_range` instead in model '{self.canonical_name(context)}'."
             )
 
         if strategy == "microbatch":
-            time_column = self._get_overlapping_field_value(
-                context, "event_time", "time_column"
-            )
+            if self.time_column:
+                raise ConfigError(
+                    f"{self.canonical_name(context)}: 'time_column' cannot be used with 'microbatch' incremental strategy. Use 'event_time' instead."
+                )
+            time_column = self._get_field_value("event_time")
             if not time_column:
                 raise ConfigError(
                     f"{self.canonical_name(context)}: 'event_time' is required for microbatch incremental strategy."

tests/dbt/test_model.py

Lines changed: 5 additions & 2 deletions
@@ -354,7 +354,10 @@ def test_load_incremental_time_range_strategy_all_defined(
         config(
             materialized='incremental',
             incremental_strategy='incremental_by_time_range',
-            time_column='ds',
+            time_column={
+                'column': 'ds',
+                'format': '%Y%m%d'
+            },
             auto_restatement_intervals=3,
             partition_by_time_column=false,
             lookback=5,
@@ -393,7 +396,7 @@ def test_load_incremental_time_range_strategy_all_defined(
     assert model.kind.partition_by_time_column is False
     assert model.kind.lookback == 5
     assert model.kind.time_column == TimeColumn(
-        column=exp.to_column("ds", quoted=True), format="%Y-%m-%d"
+        column=exp.to_column("ds", quoted=True), format="%Y%m%d"
     )
     assert model.kind.batch_size == 3
     assert model.kind.batch_concurrency == 2
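Note: a minimal sketch of the object the updated assertion builds, assuming TimeColumn is importable from sqlmesh.core.model.kind (the test's import block is outside this hunk). It only illustrates that the dict form of time_column is expected to carry the explicit '%Y%m%d' format, where the earlier bare-string form left the test expecting '%Y-%m-%d'.

from sqlglot import exp

from sqlmesh.core.model.kind import TimeColumn  # assumed import path

expected = TimeColumn(column=exp.to_column("ds", quoted=True), format="%Y%m%d")
print(expected.column.sql(), expected.format)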
