1616from sqlmesh .core .context import Context
1717from sqlmesh .integrations .dlt import generate_dlt_models
1818from sqlmesh .utils .date import now_ds , time_like_to_str , timedelta , to_datetime , yesterday_ds
19+ from sqlmesh .core .config .connection import DIALECT_TO_TYPE
1920
2021FREEZE_TIME = "2023-01-01 00:00:00 UTC"
2122
@@ -885,7 +886,7 @@ def test_dlt_pipeline_errors(runner, tmp_path):
885886 # Error if no pipeline is provided
886887 result = runner .invoke (cli , ["--paths" , tmp_path , "init" , "-t" , "dlt" , "duckdb" ])
887888 assert (
888- "Error: DLT pipeline is a required argument to generate a SQLMesh project from DLT"
889+ "Error: Please provide a DLT pipeline with the `--dlt-pipeline` flag to generate a SQLMesh project from DLT"
889890 in result .output
890891 )
891892
@@ -948,11 +949,12 @@ def test_dlt_filesystem_pipeline(tmp_path):
948949 assert incremental_model == expected_incremental_model
949950
950951 expected_config = (
952+ "# --- Gateway Connection ---\n "
951953 "gateways:\n "
952954 " athena:\n "
953955 " connection:\n "
954956 " # For more information on configuring the connection to your execution engine, visit:\n "
955- " # https://sqlmesh.readthedocs.io/en/stable/reference/configuration/#connections \n "
957+ " # https://sqlmesh.readthedocs.io/en/stable/reference/configuration/#connection \n "
956958 " # https://sqlmesh.readthedocs.io/en/stable/integrations/engines/athena/#connection-options\n "
957959 " type: athena\n "
958960 " # concurrent_tasks: 4\n "
@@ -968,11 +970,22 @@ def test_dlt_filesystem_pipeline(tmp_path):
968970 " # s3_staging_dir: \n "
969971 " # schema_name: \n "
970972 " # catalog_name: \n "
971- " # s3_warehouse_location: \n \n \n "
973+ " # s3_warehouse_location: \n \n "
972974 "default_gateway: athena\n \n "
975+ "# --- Model Defaults ---\n "
976+ "# https://sqlmesh.readthedocs.io/en/stable/reference/model_configuration/#model-defaults\n \n "
973977 "model_defaults:\n "
974978 " dialect: athena\n "
975- f" start: { yesterday_ds ()} \n "
979+ f" start: { yesterday_ds ()} # Start date for backfill history\n "
980+ " cron: '@daily' # Run models daily at 12am UTC (can override per model)\n \n "
981+ "# --- Linting Rules ---\n "
982+ "# Enforce standards for your team\n "
983+ "# https://sqlmesh.readthedocs.io/en/stable/guides/linter/\n \n "
984+ "linter:\n "
985+ " enabled: true\n "
986+ " rules:\n "
987+ " - ambiguousorinvalidcolumn\n "
988+ " - invalidselectstarexpansion\n "
976989 )
977990
978991 with open (config_path ) as file :
@@ -985,7 +998,7 @@ def test_dlt_filesystem_pipeline(tmp_path):
985998
986999
9871000@time_machine .travel (FREEZE_TIME )
988- def test_plan_dlt (runner , tmp_path ):
1001+ def test_dlt_pipeline (runner , tmp_path ):
9891002 from dlt .common .pipeline import get_dlt_pipelines_dir
9901003
9911004 root_dir = path .abspath (getcwd ())
@@ -1008,17 +1021,31 @@ def test_plan_dlt(runner, tmp_path):
10081021 dlt_path = get_dlt_pipelines_dir ()
10091022 init_example_project (tmp_path , "duckdb" , ProjectTemplate .DLT , "sushi" , dlt_path = dlt_path )
10101023
1011- expected_config = f"""gateways:
1024+ expected_config = f"""# --- Gateway Connection ---
1025+ gateways:
10121026 duckdb:
10131027 connection:
10141028 type: duckdb
10151029 database: { dataset_path }
1016-
10171030default_gateway: duckdb
10181031
1032+ # --- Model Defaults ---
1033+ # https://sqlmesh.readthedocs.io/en/stable/reference/model_configuration/#model-defaults
1034+
10191035model_defaults:
10201036 dialect: duckdb
1021- start: { yesterday_ds ()}
1037+ start: { yesterday_ds ()} # Start date for backfill history
1038+ cron: '@daily' # Run models daily at 12am UTC (can override per model)
1039+
1040+ # --- Linting Rules ---
1041+ # Enforce standards for your team
1042+ # https://sqlmesh.readthedocs.io/en/stable/guides/linter/
1043+
1044+ linter:
1045+ enabled: true
1046+ rules:
1047+ - ambiguousorinvalidcolumn
1048+ - invalidselectstarexpansion
10221049"""
10231050
10241051 with open (tmp_path / "config.yaml" ) as file :
@@ -1167,30 +1194,6 @@ def test_plan_dlt(runner, tmp_path):
11671194 remove (dataset_path )
11681195
11691196
1170- @time_machine .travel (FREEZE_TIME )
1171- def test_init_project_dialects (tmp_path ):
1172- dialect_to_config = {
1173- "redshift" : "# concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # user: \n # password: \n # database: \n # host: \n # port: \n # source_address: \n # unix_sock: \n # ssl: \n # sslmode: \n # timeout: \n # tcp_keepalive: \n # application_name: \n # preferred_role: \n # principal_arn: \n # credentials_provider: \n # region: \n # cluster_identifier: \n # iam: \n # is_serverless: \n # serverless_acct_id: \n # serverless_work_group: \n # enable_merge: " ,
1174- "bigquery" : "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # method: oauth\n # project: \n # execution_project: \n # quota_project: \n # location: \n # keyfile: \n # keyfile_json: \n # token: \n # refresh_token: \n # client_id: \n # client_secret: \n # token_uri: \n # scopes: \n # impersonated_service_account: \n # job_creation_timeout_seconds: \n # job_execution_timeout_seconds: \n # job_retries: 1\n # job_retry_deadline_seconds: \n # priority: \n # maximum_bytes_billed: " ,
1175- "snowflake" : "account: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # user: \n # password: \n # warehouse: \n # database: \n # role: \n # authenticator: \n # token: \n # host: \n # port: \n # application: Tobiko_SQLMesh\n # private_key: \n # private_key_path: \n # private_key_passphrase: \n # session_parameters: " ,
1176- "databricks" : "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # server_hostname: \n # http_path: \n # access_token: \n # auth_type: \n # oauth_client_id: \n # oauth_client_secret: \n # catalog: \n # http_headers: \n # session_configuration: \n # databricks_connect_server_hostname: \n # databricks_connect_access_token: \n # databricks_connect_cluster_id: \n # databricks_connect_use_serverless: False\n # force_databricks_connect: False\n # disable_databricks_connect: False\n # disable_spark_session: False" ,
1177- "postgres" : "host: \n user: \n password: \n port: \n database: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: True\n # pretty_sql: False\n # keepalives_idle: \n # connect_timeout: 10\n # role: \n # sslmode: \n # application_name: " ,
1178- }
1179-
1180- for dialect , expected_config in dialect_to_config .items ():
1181- init_example_project (tmp_path , dialect = dialect )
1182-
1183- config_start = f"gateways:\n { dialect } :\n connection:\n # For more information on configuring the connection to your execution engine, visit:\n # https://sqlmesh.readthedocs.io/en/stable/reference/configuration/#connections\n # https://sqlmesh.readthedocs.io/en/stable/integrations/engines/{ dialect } /#connection-options\n type: { dialect } \n "
1184- config_end = f"\n \n \n default_gateway: { dialect } \n \n model_defaults:\n dialect: { dialect } \n start: { yesterday_ds ()} \n "
1185-
1186- with open (tmp_path / "config.yaml" ) as file :
1187- config = file .read ()
1188-
1189- assert config == f"{ config_start } { expected_config } { config_end } "
1190-
1191- remove (tmp_path / "config.yaml" )
1192-
1193-
11941197@time_machine .travel (FREEZE_TIME )
11951198def test_environments (runner , tmp_path ):
11961199 create_example_project (tmp_path )
@@ -1340,8 +1343,6 @@ def test_state_export(runner: CliRunner, tmp_path: Path) -> None:
13401343 catch_exceptions = False ,
13411344 )
13421345 assert result .exit_code == 0
1343-
1344- # verify output
13451346 assert "Gateway: local" in result .output
13461347 assert "Type: duckdb" in result .output
13471348 assert "Exporting versions" in result .output
@@ -1688,27 +1689,6 @@ def test_state_import_local(runner: CliRunner, tmp_path: Path) -> None:
16881689 assert "Aborting" in result .output
16891690
16901691
1691- def test_dbt_init (tmp_path ):
1692- # The dbt init project doesn't require a dialect
1693- init_example_project (tmp_path , dialect = None , template = ProjectTemplate .DBT )
1694-
1695- config_path = tmp_path / "config.py"
1696- assert config_path .exists ()
1697-
1698- with open (config_path ) as file :
1699- config = file .read ()
1700-
1701- assert (
1702- config
1703- == """from pathlib import Path
1704-
1705- from sqlmesh.dbt.loader import sqlmesh_config
1706-
1707- config = sqlmesh_config(Path(__file__).parent)
1708- """
1709- )
1710-
1711-
17121692def test_ignore_warnings (runner : CliRunner , tmp_path : Path ) -> None :
17131693 create_example_project (tmp_path )
17141694
@@ -1791,3 +1771,210 @@ def test_table_diff_schema_diff_ignore_case(runner: CliRunner, tmp_path: Path):
17911771 assert result .exit_code == 0
17921772 stripped_output = "" .join ((x for x in result .output if x in string .printable ))
17931773 assert "Schema Diff Between 'T1' and 'T2':\n Schemas match" in stripped_output
1774+
1775+
def test_init_bad_engine_type(runner: CliRunner, tmp_path: Path):
    """An unknown engine argument to `init` aborts with a helpful error."""
    args = ["--paths", str(tmp_path), "init", "invalid"]
    result = runner.invoke(cli, args)

    assert result.exit_code == 1
    assert "Invalid engine 'invalid'. Please specify one of " in result.output
1784+
1785+
def test_init_bad_template(runner: CliRunner, tmp_path: Path):
    """An unknown template passed via `-t` aborts with a helpful error."""
    args = ["--paths", str(tmp_path), "init", "-t", "invalid_template"]
    result = runner.invoke(cli, args)

    assert result.exit_code == 1
    assert "Invalid project template 'invalid_template'. Please specify one of " in result.output
1794+
1795+
def test_init_empty_template(runner: CliRunner, tmp_path: Path):
    """The `empty` template scaffolds the project directories without example files."""
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init", "duckdb", "-t", "empty"])
    assert result.exit_code == 0

    # The project skeleton is created...
    assert (tmp_path / "models").exists()

    # ...but none of the example models/seeds from the default template.
    for example_file in (
        tmp_path / "models" / "full_model.sql",
        tmp_path / "models" / "incremental_model.sql",
        tmp_path / "seeds" / "seed_data.csv",
    ):
        assert not example_file.exists()
1809+
1810+
def test_init_interactive_start(runner: CliRunner, tmp_path: Path):
    """Interactive init starts when no engine is given and the template is not dbt."""
    # Prompt answers: 1 (DEFAULT template), 1 (duckdb engine), 1 (DEFAULT CLI mode)
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init"], input="1\n1\n1\n")
    assert result.exit_code == 0
    assert "Choose your SQL engine" in result.output

    # With an explicit dbt template there is nothing to prompt for.
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init", "-t", "dbt"])
    assert "Choose your SQL engine" not in result.output
1828+
1829+
def test_init_interactive_invalid_int(runner: CliRunner, tmp_path: Path):
    """An out-of-range numeric choice in interactive init aborts with a validation error."""
    # Prompt answers: 0 (invalid); the remaining 1/1/1 are never consumed.
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init"], input="0\n1\n1\n1\n")

    assert result.exit_code == 1
    assert (
        "'0' is not a valid project type number - please enter a number between 1" in result.output
    )
1843+
1844+
def test_init_interactive_template_passed(runner: CliRunner, tmp_path: Path):
    """Passing `-t` skips the project-type prompt during interactive init."""
    # Prompt answers: 1 (duckdb engine), 1 (DEFAULT CLI mode)
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init", "-t", "empty"], input="1\n1\n")

    assert result.exit_code == 0
    assert "What type of project do you want to set up?" not in result.output
1855+
1856+
def test_init_interactive_cli_mode_default(runner: CliRunner, tmp_path: Path):
    """DEFAULT CLI mode leaves `no_diff` out of the generated config."""
    # Prompt answers: 1 (DEFAULT template), 1 (duckdb engine), 1 (DEFAULT CLI mode)
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init"], input="1\n1\n1\n")
    assert result.exit_code == 0

    config_path = tmp_path / "config.yaml"
    assert config_path.exists()
    assert "no_diff: true" not in config_path.read_text()
1869+
1870+
def test_init_interactive_cli_mode_simple(runner: CliRunner, tmp_path: Path):
    """SIMPLE CLI mode writes `no_diff: true` into the generated config."""
    # Prompt answers: 1 (DEFAULT template), 1 (duckdb engine), 2 (SIMPLE CLI mode)
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init"], input="1\n1\n2\n")
    assert result.exit_code == 0

    config_path = tmp_path / "config.yaml"
    assert config_path.exists()
    assert "no_diff: true" in config_path.read_text()
1883+
1884+
def test_init_dbt_template_no_dbt_project(runner: CliRunner, tmp_path: Path):
    """The dbt template errors out when no dbt_project.yml exists in the directory."""
    expected_error = "Required dbt project file 'dbt_project.yml' not found in the current directory."

    # Template passed explicitly on the command line.
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init", "-t", "dbt"])
    assert result.exit_code == 1
    assert expected_error in result.output

    # Same failure via interactive init (prompt answer: 2 = dbt template).
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init"], input="2\n")
    assert result.exit_code == 1
    assert expected_error in result.output
1910+
1911+
def test_init_dbt_template(runner: CliRunner, tmp_path: Path):
    """Initializing over a dbt project writes a minimal config.py wrapper."""
    # A dbt_project.yml must exist for the dbt template to proceed.
    Path(tmp_path / "dbt_project.yml").touch()

    # Prompt answer: 2 (dbt template).
    result = runner.invoke(cli, ["--paths", str(tmp_path), "init"], input="2\n")
    assert result.exit_code == 0

    config_path = tmp_path / "config.py"
    assert config_path.exists()

    expected_config = """from pathlib import Path

from sqlmesh.dbt.loader import sqlmesh_config

config = sqlmesh_config(Path(__file__).parent)
"""
    assert config_path.read_text() == expected_config
1936+
1937+
@time_machine.travel(FREEZE_TIME)
def test_init_project_engine_configs(tmp_path):
    """For each supported engine, `init_example_project` writes the expected config.yaml.

    Time is frozen so the `start:` date (yesterday_ds) is deterministic.
    """
    # Expected connection-settings stub for each engine: everything that appears
    # under the `type:` line in the generated gateway connection block.
    # NOTE(review): inner whitespace of these literals reconstructed from a
    # mangled source — verify indentation against init_example_project output.
    engine_type_to_config = {
        "redshift": "# concurrent_tasks: 4\n      # register_comments: True\n      # pre_ping: False\n      # pretty_sql: False\n      # user: \n      # password: \n      # database: \n      # host: \n      # port: \n      # source_address: \n      # unix_sock: \n      # ssl: \n      # sslmode: \n      # timeout: \n      # tcp_keepalive: \n      # application_name: \n      # preferred_role: \n      # principal_arn: \n      # credentials_provider: \n      # region: \n      # cluster_identifier: \n      # iam: \n      # is_serverless: \n      # serverless_acct_id: \n      # serverless_work_group: \n      # enable_merge: ",
        "bigquery": "# concurrent_tasks: 1\n      # register_comments: True\n      # pre_ping: False\n      # pretty_sql: False\n      # method: oauth\n      # project: \n      # execution_project: \n      # quota_project: \n      # location: \n      # keyfile: \n      # keyfile_json: \n      # token: \n      # refresh_token: \n      # client_id: \n      # client_secret: \n      # token_uri: \n      # scopes: \n      # impersonated_service_account: \n      # job_creation_timeout_seconds: \n      # job_execution_timeout_seconds: \n      # job_retries: 1\n      # job_retry_deadline_seconds: \n      # priority: \n      # maximum_bytes_billed: ",
        "snowflake": "account: \n      # concurrent_tasks: 4\n      # register_comments: True\n      # pre_ping: False\n      # pretty_sql: False\n      # user: \n      # password: \n      # warehouse: \n      # database: \n      # role: \n      # authenticator: \n      # token: \n      # host: \n      # port: \n      # application: Tobiko_SQLMesh\n      # private_key: \n      # private_key_path: \n      # private_key_passphrase: \n      # session_parameters: ",
        "databricks": "# concurrent_tasks: 1\n      # register_comments: True\n      # pre_ping: False\n      # pretty_sql: False\n      # server_hostname: \n      # http_path: \n      # access_token: \n      # auth_type: \n      # oauth_client_id: \n      # oauth_client_secret: \n      # catalog: \n      # http_headers: \n      # session_configuration: \n      # databricks_connect_server_hostname: \n      # databricks_connect_access_token: \n      # databricks_connect_cluster_id: \n      # databricks_connect_use_serverless: False\n      # force_databricks_connect: False\n      # disable_databricks_connect: False\n      # disable_spark_session: False",
        "postgres": "host: \n      user: \n      password: \n      port: \n      database: \n      # concurrent_tasks: 4\n      # register_comments: True\n      # pre_ping: True\n      # pretty_sql: False\n      # keepalives_idle: \n      # connect_timeout: 10\n      # role: \n      # sslmode: \n      # application_name: ",
    }

    for engine_type, expected_config in engine_type_to_config.items():
        # Generates config.yaml (and example project files) into tmp_path.
        init_example_project(tmp_path, engine_type=engine_type)

        # Header of the generated config up to (and including) the `type:` line.
        config_start = f"# --- Gateway Connection ---\ngateways:\n  {engine_type}:\n    connection:\n      # For more information on configuring the connection to your execution engine, visit:\n      # https://sqlmesh.readthedocs.io/en/stable/reference/configuration/#connection\n      # https://sqlmesh.readthedocs.io/en/stable/integrations/engines/{engine_type}/#connection-options\n      type: {engine_type}\n      "
        # Everything after the connection stub: default gateway, model defaults
        # (dialect mapped from engine type via DIALECT_TO_TYPE), and linter rules.
        config_end = f"""

default_gateway: {engine_type}

# --- Model Defaults ---
# https://sqlmesh.readthedocs.io/en/stable/reference/model_configuration/#model-defaults

model_defaults:
  dialect: {DIALECT_TO_TYPE.get(engine_type)}
  start: {yesterday_ds()} # Start date for backfill history
  cron: '@daily' # Run models daily at 12am UTC (can override per model)

# --- Linting Rules ---
# Enforce standards for your team
# https://sqlmesh.readthedocs.io/en/stable/guides/linter/

linter:
  enabled: true
  rules:
    - ambiguousorinvalidcolumn
    - invalidselectstarexpansion
"""

        with open(tmp_path / "config.yaml") as file:
            config = file.read()

        assert config == f"{config_start}{expected_config}{config_end}"

        # Remove the generated config so the next engine starts fresh.
        remove(tmp_path / "config.yaml")
0 commit comments