Skip to content

Commit c02306f

Browse files
use tmp_path in integration tests, clean up and extend them
1 parent 51c0bab commit c02306f

6 files changed

Lines changed: 74 additions & 68 deletions

File tree

tests/core/test_integration.py

Lines changed: 70 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,12 @@
66
from unittest import mock
77
from unittest.mock import patch
88

9+
import os
910
import numpy as np
1011
import pandas as pd
1112
import pytest
1213
from pathlib import Path
13-
import os
14+
from sqlmesh.core.config.naming import NameInferenceConfig
1415
from sqlmesh.utils.concurrency import NodeExecutionFailedError
1516
import time_machine
1617
from pytest_mock.plugin import MockerFixture
@@ -4497,52 +4498,70 @@ def test_multi(mocker):
44974498

44984499

44994500
@use_terminal_console
4500-
def test_multi_virtual_layer(mocker):
4501-
context = Context(paths=["tests/fixtures/multi_virtual_layer"])
4501+
def test_multi_virtual_layer(copy_to_temp_path):
4502+
paths = copy_to_temp_path("tests/fixtures/multi_virtual_layer")
4503+
path = Path(paths[0])
4504+
first_db_path = str(path / "db_1.db")
4505+
second_db_path = str(path / "db_2.db")
45024506

4503-
local_db = "db.duckdb"
4504-
if os.path.exists(local_db):
4505-
os.remove(local_db)
4507+
config = Config(
4508+
gateways={
4509+
"first": GatewayConfig(
4510+
connection=DuckDBConnectionConfig(database=first_db_path),
4511+
variables={"overriden_var": "gateway_1"},
4512+
),
4513+
"second": GatewayConfig(
4514+
connection=DuckDBConnectionConfig(database=second_db_path),
4515+
variables={"overriden_var": "gateway_2"},
4516+
),
4517+
},
4518+
model_defaults=ModelDefaultsConfig(dialect="duckdb"),
4519+
model_naming=NameInferenceConfig(infer_names=True),
4520+
default_gateway="first",
4521+
gateway_managed_virtual_layer=True,
4522+
variables={"overriden_var": "global", "global_one": 88},
4523+
)
4524+
4525+
context = Context(paths=paths, config=config)
45064526

45074527
# For the model without a gateway, the default should be used and the gateway variable should override the global
45084528
assert (
4509-
context.render("local_schema.model_one").sql()
4529+
context.render("first_schema.model_one").sql()
45104530
== 'SELECT \'gateway_1\' AS "item_id", 88 AS "global_one", 1 AS "macro_one"'
45114531
)
45124532

45134533
# For a model with a gateway specified, the appropriate variable should be used to override
45144534
assert (
4515-
context.render("memory.memory_schema.model_one").sql()
4535+
context.render("db_2.second_schema.model_one").sql()
45164536
== 'SELECT \'gateway_2\' AS "item_id", 88 AS "global_one", 1 AS "macro_one"'
45174537
)
45184538

4519-
# context._new_state_sync().reset(default_catalog=context.default_catalog)
45204539
plan = context.plan_builder().build()
45214540
assert len(plan.new_snapshots) == 4
45224541
context.apply(plan)
45234542

45244543
# Validate the tables that source from the first tables are correct as well with evaluate
45254544
assert (
45264545
context.evaluate(
4527-
"local_schema.model_two", start=now(), end=now(), execution_time=now()
4546+
"first_schema.model_two", start=now(), end=now(), execution_time=now()
45284547
).to_string()
45294548
== " item_id global_one\n0 gateway_1 88"
45304549
)
45314550
assert (
45324551
context.evaluate(
4533-
"memory.memory_schema.model_two", start=now(), end=now(), execution_time=now()
4552+
"db_2.second_schema.model_two", start=now(), end=now(), execution_time=now()
45344553
).to_string()
45354554
== " item_id global_one\n0 gateway_2 88"
45364555
)
45374556

45384557
assert sorted(set(snapshot.name for snapshot in plan.directly_modified)) == [
4539-
'"db"."local_schema"."model_one"',
4540-
'"db"."local_schema"."model_two"',
4541-
'"memory"."memory_schema"."model_one"',
4542-
'"memory"."memory_schema"."model_two"',
4558+
'"db_1"."first_schema"."model_one"',
4559+
'"db_1"."first_schema"."model_two"',
4560+
'"db_2"."second_schema"."model_one"',
4561+
'"db_2"."second_schema"."model_two"',
45434562
]
45444563

4545-
model = context.get_model("memory.memory_schema.model_one")
4564+
model = context.get_model("db_1.first_schema.model_one")
45464565

45474566
context.upsert_model(model.copy(update={"query": model.query.select("'c' AS extra")}))
45484567
plan = context.plan_builder().build()
@@ -4553,52 +4572,70 @@ def test_multi_virtual_layer(mocker):
45534572

45544573
assert state_environments[0].gateway_managed
45554574
assert len(state_snapshots) == len(state_environments[0].snapshots)
4556-
45574575
assert [snapshot.name for snapshot in plan.directly_modified] == [
4558-
'"memory"."memory_schema"."model_one"'
4576+
'"db_1"."first_schema"."model_one"'
45594577
]
45604578
assert [x.name for x in list(plan.indirectly_modified.values())[0]] == [
4561-
'"memory"."memory_schema"."model_two"'
4579+
'"db_1"."first_schema"."model_two"'
45624580
]
45634581

45644582
assert len(plan.missing_intervals) == 1
4565-
45664583
assert (
45674584
context.evaluate(
4568-
"memory.memory_schema.model_one", start=now(), end=now(), execution_time=now()
4585+
"db_1.first_schema.model_one", start=now(), end=now(), execution_time=now()
45694586
).to_string()
4570-
== " item_id global_one macro_one extra\n0 gateway_2 88 1 c"
4587+
== " item_id global_one macro_one extra\n0 gateway_1 88 1 c"
45714588
)
45724589

4573-
# Create dev environment
4574-
model = context.get_model("db.local_schema.model_one")
4590+
# Create dev environment with changed models
4591+
model = context.get_model("db_2.second_schema.model_one")
45754592
context.upsert_model(model.copy(update={"query": model.query.select("'d' AS extra")}))
4593+
model = context.get_model("first_schema.model_two")
4594+
context.upsert_model(model.copy(update={"query": model.query.select("'d2' AS col")}))
45764595
plan = context.plan_builder("dev").build()
45774596
context.apply(plan)
45784597

45794598
dev_environment = context.state_sync.get_environment("dev")
45804599
assert dev_environment is not None
4581-
metadata = DuckDBMetadata.from_context(context)
4582-
start_schemas = set(metadata.schemas)
4583-
assert sorted(start_schemas) == sorted(
4584-
{"local_schema", "local_schema__dev", "sqlmesh", "sqlmesh__local_schema"}
4600+
4601+
metadata_engine_1 = DuckDBMetadata.from_context(context)
4602+
start_schemas_1 = set(metadata_engine_1.schemas)
4603+
assert sorted(start_schemas_1) == sorted(
4604+
{"first_schema__dev", "sqlmesh", "first_schema", "sqlmesh__first_schema"}
4605+
)
4606+
4607+
metadata_engine_2 = DuckDBMetadata(context._get_engine_adapter("second"))
4608+
start_schemas_2 = set(metadata_engine_2.schemas)
4609+
assert sorted(start_schemas_2) == sorted(
4610+
{"sqlmesh__second_schema", "second_schema", "second_schema__dev"}
45854611
)
45864612

45874613
# Invalidate dev environment
45884614
context.invalidate_environment("dev")
45894615
invalidate_environment = context.state_sync.get_environment("dev")
45904616
assert invalidate_environment is not None
4591-
schemas_prior_to_janitor = set(metadata.schemas)
45924617
assert invalidate_environment.expiration_ts < dev_environment.expiration_ts # type: ignore
4593-
assert sorted(start_schemas) == sorted(schemas_prior_to_janitor)
4618+
assert sorted(start_schemas_1) == sorted(set(metadata_engine_1.schemas))
4619+
assert sorted(start_schemas_2) == sorted(set(metadata_engine_2.schemas))
45944620

45954621
# Run janitor
45964622
context._run_janitor()
4597-
removed_schemas = start_schemas - set(metadata.schemas)
45984623
assert context.state_sync.get_environment("dev") is None
4599-
assert removed_schemas == {"local_schema__dev"}
4624+
removed_schemas = start_schemas_1 - set(metadata_engine_1.schemas)
4625+
assert removed_schemas == {"first_schema__dev"}
4626+
removed_schemas = start_schemas_2 - set(metadata_engine_2.schemas)
4627+
assert removed_schemas == {"second_schema__dev"}
46004628
prod_environment = context.state_sync.get_environment("prod")
4601-
assert len(prod_environment.snapshots_) == 4
4629+
4630+
# Remove the second gateway's second model and apply plan
4631+
second_model = path / "models/second_schema/model_two.sql"
4632+
os.remove(second_model)
4633+
assert not second_model.exists()
4634+
context = Context(paths=paths, config=config)
4635+
plan = context.plan_builder().build()
4636+
context.apply(plan)
4637+
prod_environment = context.state_sync.get_environment("prod")
4638+
assert len(prod_environment.snapshots_) == 3
46024639

46034640
# Changing the flag should show a diff
46044641
context.gateway_managed_virtual_layer = False
@@ -4614,9 +4651,6 @@ def test_multi_virtual_layer(mocker):
46144651
with pytest.raises(NodeExecutionFailedError, match=r"Execution failed for node SnapshotId*"):
46154652
context.apply(plan)
46164653

4617-
if os.path.exists(local_db):
4618-
os.remove(local_db)
4619-
46204654

46214655
def test_multi_dbt(mocker):
46224656
context = Context(paths=["examples/multi_dbt/bronze", "examples/multi_dbt/silver"])

tests/fixtures/multi_virtual_layer/config.yaml

Lines changed: 0 additions & 28 deletions
This file was deleted.

tests/fixtures/multi_virtual_layer/models/local_schema/model_one.sql renamed to tests/fixtures/multi_virtual_layer/models/first_schema/model_one.sql

File renamed without changes.

tests/fixtures/multi_virtual_layer/models/local_schema/model_two.sql renamed to tests/fixtures/multi_virtual_layer/models/first_schema/model_two.sql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,4 +6,4 @@ SELECT
66
item_id,
77
global_one
88
FROM
9-
local_schema.model_one;
9+
first_schema.model_one;

tests/fixtures/multi_virtual_layer/models/memory_schema/model_one.sql renamed to tests/fixtures/multi_virtual_layer/models/second_schema/model_one.sql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
MODEL (
22
kind FULL,
3-
gateway memory
3+
gateway second
44
);
55

66
SELECT

tests/fixtures/multi_virtual_layer/models/memory_schema/model_two.sql renamed to tests/fixtures/multi_virtual_layer/models/second_schema/model_two.sql

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
MODEL (
22
kind FULL,
3-
gateway memory
3+
gateway second
44
);
55

66
SELECT
77
item_id,
88
global_one
99
FROM
10-
memory_schema.model_one;
10+
second_schema.model_one;

0 commit comments

Comments
 (0)