sqlmesh.dbt.target
from __future__ import annotations

import abc
import typing as t

from pydantic import Field

from sqlmesh.core.config import (
    ConnectionConfig,
    DatabricksSQLConnectionConfig,
    DuckDBConnectionConfig,
    RedshiftConnectionConfig,
    SnowflakeConnectionConfig,
)
from sqlmesh.core.model import IncrementalByTimeRangeKind, IncrementalByUniqueKeyKind
from sqlmesh.utils import AttributeDict
from sqlmesh.utils.errors import ConfigError
from sqlmesh.utils.pydantic import PydanticModel

IncrementalKind = t.Union[t.Type[IncrementalByUniqueKeyKind], t.Type[IncrementalByTimeRangeKind]]
class TargetConfig(abc.ABC, PydanticModel):
    """
    Configuration for DBT profile target

    Args:
        name: The name of this target
        type: The type of the data warehouse
        schema_: The target schema for this project
        threads: The number of threads to run on
    """

    # dbt
    type: str
    name: str = ""
    schema_: str = Field(alias="schema")
    threads: int = 1
    profile_name: t.Optional[str] = None

    @classmethod
    def load(cls, name: str, data: t.Dict[str, t.Any]) -> TargetConfig:
        """
        Loads the configuration from the yaml provided for a profile target

        Args:
            data: The yaml for the project's target output

        Returns:
            The configuration of the provided profile target
        """
        db_type = data["type"]
        if db_type == "databricks":
            return DatabricksConfig(name=name, **data)
        elif db_type == "duckdb":
            return DuckDbConfig(name=name, **data)
        elif db_type == "postgres":
            return PostgresConfig(name=name, **data)
        elif db_type == "redshift":
            return RedshiftConfig(name=name, **data)
        elif db_type == "snowflake":
            return SnowflakeConfig(name=name, **data)

        raise ConfigError(f"{db_type} not supported.")

    def default_incremental_strategy(self, kind: IncrementalKind) -> str:
        """The default incremental strategy for the db"""
        raise NotImplementedError

    def to_sqlmesh(self) -> ConnectionConfig:
        """Converts target config to SQLMesh connection config"""
        raise NotImplementedError

    def target_jinja(self, profile_name: str) -> AttributeDict:
        fields = self.dict().copy()
        fields["profile_name"] = profile_name
        fields["target_name"] = self.name
        return AttributeDict(fields)
Configuration for a dbt profile target
Arguments:
- name: The name of this target
- type: The type of the data warehouse
- schema_: The target schema for this project
- threads: The number of threads to run on
@classmethod
def load(cls, name: str, data: t.Dict[str, t.Any]) -> TargetConfig:
    """
    Loads the configuration from the yaml provided for a profile target

    Args:
        data: The yaml for the project's target output

    Returns:
        The configuration of the provided profile target
    """
    db_type = data["type"]
    if db_type == "databricks":
        return DatabricksConfig(name=name, **data)
    elif db_type == "duckdb":
        return DuckDbConfig(name=name, **data)
    elif db_type == "postgres":
        return PostgresConfig(name=name, **data)
    elif db_type == "redshift":
        return RedshiftConfig(name=name, **data)
    elif db_type == "snowflake":
        return SnowflakeConfig(name=name, **data)

    raise ConfigError(f"{db_type} not supported.")
Loads the configuration from the yaml provided for a profile target
Arguments:
- name: The name of the target output
- data: The yaml for the project's target output
Returns:
The configuration of the provided profile target
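As an illustration, a minimal sketch of loading a profile target from an already-parsed profiles.yml output block; the target name and field values below are placeholders:

from sqlmesh.dbt.target import DuckDbConfig, TargetConfig

# Hypothetical output block from profiles.yml, already parsed into a dict.
data = {"type": "duckdb", "schema": "main", "path": "local.duckdb"}

# load() dispatches on data["type"] and returns the matching subclass.
target = TargetConfig.load("dev", data)
assert isinstance(target, DuckDbConfig)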
def default_incremental_strategy(self, kind: IncrementalKind) -> str:
    """The default incremental strategy for the db"""
    raise NotImplementedError
The default incremental strategy for the db
def to_sqlmesh(self) -> ConnectionConfig:
    """Converts target config to SQLMesh connection config"""
    raise NotImplementedError
Converts target config to SQLMesh connection config
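The class source above also defines target_jinja, which merges the target's fields with the profile name into an AttributeDict, presumably the rendering context behind dbt's {{ target }} variable. A small sketch with placeholder values:

from sqlmesh.dbt.target import DuckDbConfig

config = DuckDbConfig(name="dev", schema="main")
fields = config.target_jinja("my_profile")

# The returned AttributeDict carries the model fields plus the two extras.
assert fields["profile_name"] == "my_profile"
assert fields["target_name"] == "dev"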
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- parse_obj
- parse_raw
- parse_file
- from_orm
- construct
- copy
- schema
- schema_json
- validate
- update_forward_refs
class DuckDbConfig(TargetConfig):
    """
    Connection config for DuckDb target

    Args:
        path: Location of the database file. If not specified, an in memory database is used.
    """

    type: str = "duckdb"
    path: t.Optional[str] = None

    def default_incremental_strategy(self, kind: IncrementalKind) -> str:
        return "delete+insert"

    def to_sqlmesh(self) -> ConnectionConfig:
        return DuckDBConnectionConfig(database=self.path, concurrent_tasks=self.threads)
Connection config for the DuckDb target
Arguments:
- path: Location of the database file. If not specified, an in-memory database is used.
def default_incremental_strategy(self, kind: IncrementalKind) -> str:
    return "delete+insert"

The default incremental strategy for the db
def to_sqlmesh(self) -> ConnectionConfig:
    return DuckDBConnectionConfig(database=self.path, concurrent_tasks=self.threads)
Converts target config to SQLMesh connection config
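A sketch of the conversion; the path is a placeholder, and omitting it yields an in-memory database:

from sqlmesh.dbt.target import DuckDbConfig

config = DuckDbConfig(name="dev", schema="main", path="local.duckdb")

# Yields a DuckDBConnectionConfig with database="local.duckdb" and concurrent_tasks=1.
connection = config.to_sqlmesh()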
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- parse_obj
- parse_raw
- parse_file
- from_orm
- construct
- copy
- schema
- schema_json
- validate
- update_forward_refs
class SnowflakeConfig(TargetConfig):
    """
    Project connection and operational configuration for the Snowflake target

    Args:
        account: Snowflake account
        warehouse: Name of the warehouse
        database: Name of the database
        user: Name of the user
        password: User's password
        role: Role of the user
        client_session_keep_alive: A boolean flag to extend the duration of the Snowflake session beyond 4 hours
        query_tag: tag for the query in Snowflake
        connect_retries: Number of times to retry if the Snowflake connector encounters an error
        connect_timeout: Number of seconds to wait between failed attempts
        retry_on_database_errors: A boolean flag to retry if a Snowflake connector Database error is encountered
        retry_all: A boolean flag to retry on all Snowflake connector errors
    """

    # TODO add other forms of authentication
    type: str = "snowflake"
    account: str
    warehouse: str
    database: str
    user: str
    password: str
    role: t.Optional[str]
    client_session_keep_alive: bool = False
    query_tag: t.Optional[str]
    connect_retries: int = 0
    connect_timeout: int = 10
    retry_on_database_errors: bool = False
    retry_all: bool = False

    def default_incremental_strategy(self, kind: IncrementalKind) -> str:
        return "merge"

    def to_sqlmesh(self) -> ConnectionConfig:
        return SnowflakeConnectionConfig(
            user=self.user,
            password=self.password,
            account=self.account,
            warehouse=self.warehouse,
            database=self.database,
            role=self.role,
            concurrent_tasks=self.threads,
        )
Project connection and operational configuration for the Snowflake target
Arguments:
- account: Snowflake account
- warehouse: Name of the warehouse
- database: Name of the database
- user: Name of the user
- password: User's password
- role: Role of the user
- client_session_keep_alive: A boolean flag to extend the duration of the Snowflake session beyond 4 hours
- query_tag: Tag for the query in Snowflake
- connect_retries: Number of times to retry if the Snowflake connector encounters an error
- connect_timeout: Number of seconds to wait between failed attempts
- retry_on_database_errors: A boolean flag to retry if a Snowflake connector Database error is encountered
- retry_all: A boolean flag to retry on all Snowflake connector errors
def default_incremental_strategy(self, kind: IncrementalKind) -> str:
    return "merge"

The default incremental strategy for the db
def to_sqlmesh(self) -> ConnectionConfig:
    return SnowflakeConnectionConfig(
        user=self.user,
        password=self.password,
        account=self.account,
        warehouse=self.warehouse,
        database=self.database,
        role=self.role,
        concurrent_tasks=self.threads,
    )
Converts target config to SQLMesh connection config
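A sketch of a Snowflake target and its conversion; every credential value below is a placeholder:

from sqlmesh.dbt.target import SnowflakeConfig
from sqlmesh.core.model import IncrementalByUniqueKeyKind

config = SnowflakeConfig(
    name="prod",
    schema="analytics",
    account="abc12345",       # placeholder account identifier
    warehouse="transforming",
    database="analytics",
    user="etl_user",
    password="secret",        # placeholder; real profiles should avoid plaintext secrets
)

# Snowflake defaults to "merge" regardless of the incremental kind.
assert config.default_incremental_strategy(IncrementalByUniqueKeyKind) == "merge"

# Yields a SnowflakeConnectionConfig carrying the same credentials.
connection = config.to_sqlmesh()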
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- parse_obj
- parse_raw
- parse_file
- from_orm
- construct
- copy
- schema
- schema_json
- validate
- update_forward_refs
class PostgresConfig(TargetConfig):
    """
    Project connection and operational configuration for the Postgres target

    Args:
        host: The Postgres host to connect to
        user: Name of the user
        password: User's password
        port: The port to connect to
        dbname: Name of the database
        keepalives_idle: Seconds between TCP keepalive packets
        connect_timeout: Number of seconds to wait between failed attempts
        retries: Number of times to retry if the Postgres connector encounters an error
        search_path: Overrides the default search path
        role: Role of the user
        sslmode: SSL Mode used to connect to the database
    """

    type: str = "postgres"
    host: str
    user: str
    password: str
    port: int
    dbname: str
    keepalives_idle: int = 0
    connect_timeout: int = 10
    retries: int = 1
    search_path: t.Optional[str] = None
    role: t.Optional[str] = None
    sslmode: t.Optional[str] = None

    def default_incremental_strategy(self, kind: IncrementalKind) -> str:
        return "delete+insert" if kind is IncrementalByUniqueKeyKind else "append"

    def to_sqlmesh(self) -> ConnectionConfig:
        raise ConfigError("PostgreSQL is not supported by SQLMesh yet.")
Project connection and operational configuration for the Postgres target
Arguments:
- host: The Postgres host to connect to
- user: Name of the user
- password: User's password
- port: The port to connect to
- dbname: Name of the database
- keepalives_idle: Seconds between TCP keepalive packets
- connect_timeout: Number of seconds to wait between failed attempts
- retries: Number of times to retry if the Postgres connector encounters an error
- search_path: Overrides the default search path
- role: Role of the user
- sslmode: SSL mode used to connect to the database
def default_incremental_strategy(self, kind: IncrementalKind) -> str:
    return "delete+insert" if kind is IncrementalByUniqueKeyKind else "append"
The default incremental strategy for the db
def to_sqlmesh(self) -> ConnectionConfig:
    raise ConfigError("PostgreSQL is not supported by SQLMesh yet.")
Converts target config to SQLMesh connection config
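The Postgres target is the one config whose default strategy depends on the incremental kind, and its to_sqlmesh raises until Postgres support lands. A sketch with placeholder credentials:

from sqlmesh.dbt.target import PostgresConfig
from sqlmesh.core.model import IncrementalByTimeRangeKind, IncrementalByUniqueKeyKind
from sqlmesh.utils.errors import ConfigError

config = PostgresConfig(
    name="dev",
    schema="public",
    host="localhost",
    user="postgres",
    password="secret",  # placeholder
    port=5432,
    dbname="analytics",
)

# Unique-key incrementals get delete+insert; time-range incrementals get append.
assert config.default_incremental_strategy(IncrementalByUniqueKeyKind) == "delete+insert"
assert config.default_incremental_strategy(IncrementalByTimeRangeKind) == "append"

try:
    config.to_sqlmesh()
except ConfigError:
    pass  # PostgreSQL is not supported by SQLMesh yet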
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- parse_obj
- parse_raw
- parse_file
- from_orm
- construct
- copy
- schema
- schema_json
- validate
- update_forward_refs
class RedshiftConfig(TargetConfig):
    """
    Project connection and operational configuration for the Redshift target

    Args:
        host: The Redshift host to connect to
        user: Name of the user
        password: User's password
        port: The port to connect to
        dbname: Name of the database
        keepalives_idle: Seconds between TCP keepalive packets
        connect_timeout: Number of seconds to wait between failed attempts
        ra3_node: Enables cross-database sources
        search_path: Overrides the default search path
        sslmode: SSL Mode used to connect to the database
    """

    # TODO add other forms of authentication
    type: str = "redshift"
    host: str
    user: str
    password: str
    port: int
    dbname: str
    keepalives_idle: int = 240
    connect_timeout: int = 10
    ra3_node: bool = True
    search_path: t.Optional[str] = None
    sslmode: t.Optional[str] = None

    def default_incremental_strategy(self, kind: IncrementalKind) -> str:
        return "append"

    def to_sqlmesh(self) -> ConnectionConfig:
        return RedshiftConnectionConfig(
            user=self.user,
            password=self.password,
            host=self.host,
            port=self.port,
            sslmode=self.sslmode,
            timeout=self.connect_timeout,
            concurrent_tasks=self.threads,
        )
Project connection and operational configuration for the Redshift target
Arguments:
- host: The Redshift host to connect to
- user: Name of the user
- password: User's password
- port: The port to connect to
- dbname: Name of the database
- keepalives_idle: Seconds between TCP keepalive packets
- connect_timeout: Number of seconds to wait between failed attempts
- ra3_node: Enables cross-database sources
- search_path: Overrides the default search path
- sslmode: SSL mode used to connect to the database
def default_incremental_strategy(self, kind: IncrementalKind) -> str:
    return "append"

The default incremental strategy for the db
def to_sqlmesh(self) -> ConnectionConfig:
    return RedshiftConnectionConfig(
        user=self.user,
        password=self.password,
        host=self.host,
        port=self.port,
        sslmode=self.sslmode,
        timeout=self.connect_timeout,
        concurrent_tasks=self.threads,
    )
Converts target config to SQLMesh connection config
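A sketch of the Redshift conversion; the host and credentials are placeholders, and connect_timeout is passed through as the connection's timeout:

from sqlmesh.dbt.target import RedshiftConfig

config = RedshiftConfig(
    name="prod",
    schema="analytics",
    host="examplecluster.abc123.us-west-2.redshift.amazonaws.com",  # placeholder
    user="awsuser",
    password="secret",  # placeholder
    port=5439,
    dbname="analytics",
)

# Yields a RedshiftConnectionConfig with timeout=10 and concurrent_tasks=1.
connection = config.to_sqlmesh()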
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- parse_obj
- parse_raw
- parse_file
- from_orm
- construct
- copy
- schema
- schema_json
- validate
- update_forward_refs
class DatabricksConfig(TargetConfig):
    """
    Project connection and operational configuration for the Databricks target

    Args:
        catalog: Catalog name to use for Unity Catalog
        host: The Databricks host to connect to
        http_path: The Databricks compute resources URL
        token: Personal access token
    """

    type: str = "databricks"
    catalog: t.Optional[str] = None
    host: str
    http_path: str
    token: str

    def default_incremental_strategy(self, kind: IncrementalKind) -> str:
        return "merge"

    def to_sqlmesh(self) -> ConnectionConfig:
        return DatabricksSQLConnectionConfig(
            server_hostname=self.host,
            http_path=self.http_path,
            access_token=self.token,
            concurrent_tasks=self.threads,
        )
Project connection and operational configuration for the Databricks target
Arguments:
- catalog: Catalog name to use for Unity Catalog
- host: The Databricks host to connect to
- http_path: The Databricks compute resources URL
- token: Personal access token
def default_incremental_strategy(self, kind: IncrementalKind) -> str:
    return "merge"

The default incremental strategy for the db
def to_sqlmesh(self) -> ConnectionConfig:
    return DatabricksSQLConnectionConfig(
        server_hostname=self.host,
        http_path=self.http_path,
        access_token=self.token,
        concurrent_tasks=self.threads,
    )
Converts target config to SQLMesh connection config
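A sketch of the Databricks conversion; host, http_path, and token are placeholders for a workspace hostname, a SQL warehouse or cluster HTTP path, and a personal access token:

from sqlmesh.dbt.target import DatabricksConfig

config = DatabricksConfig(
    name="prod",
    schema="analytics",
    host="adb-1234567890123456.7.azuredatabricks.net",  # placeholder
    http_path="/sql/1.0/warehouses/abc123",             # placeholder
    token="dapi-XXXX",                                  # placeholder
)

# Maps host -> server_hostname and token -> access_token on the SQLMesh side.
connection = config.to_sqlmesh()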
Inherited Members
- pydantic.main.BaseModel
- BaseModel
- parse_obj
- parse_raw
- parse_file
- from_orm
- construct
- copy
- schema
- schema_json
- validate
- update_forward_refs