Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/integrations/engines/redshift.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ pip install "sqlmesh[redshift]"
| `region` | The AWS region of the Amazon Redshift cluster | string | N |
| `cluster_identifier` | The cluster identifier of the Amazon Redshift cluster | string | N |
| `iam` | If IAM authentication is enabled. IAM must be True when authenticating using an IdP | bool | N |
| `db_user` | The database user to authenticate as. Required when using IAM authentication | string | N |
| `is_serverless` | If the Amazon Redshift cluster is serverless (Default: `False`) | bool | N |
| `serverless_acct_id` | The account ID of the serverless cluster | string | N |
| `serverless_work_group` | The name of the work group for the serverless endpoint | string | N |
Expand Down
3 changes: 3 additions & 0 deletions sqlmesh/core/config/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -1310,6 +1310,7 @@ class RedshiftConnectionConfig(ConnectionConfig):
region: The AWS region where the Amazon Redshift cluster is located.
cluster_identifier: The cluster identifier of the Amazon Redshift cluster.
iam: If IAM authentication is enabled. Default value is False. IAM must be True when authenticating using an IdP.
db_user: The database user to authenticate as. Required when using IAM authentication.
is_serverless: Whether the Redshift endpoint is serverless or provisioned. Default value is False.
serverless_acct_id: The account ID of the serverless cluster. Default value is None.
serverless_work_group: The name of the work group for the serverless endpoint. Default value is None.
Expand All @@ -1335,6 +1336,7 @@ class RedshiftConnectionConfig(ConnectionConfig):
region: t.Optional[str] = None
cluster_identifier: t.Optional[str] = None
iam: t.Optional[bool] = None
db_user: t.Optional[str] = None
is_serverless: t.Optional[bool] = None
serverless_acct_id: t.Optional[str] = None
serverless_work_group: t.Optional[str] = None
Expand Down Expand Up @@ -1372,6 +1374,7 @@ def _connection_kwargs_keys(self) -> t.Set[str]:
"region",
"cluster_identifier",
"iam",
"db_user",
"is_serverless",
"serverless_acct_id",
"serverless_work_group",
Expand Down
2 changes: 1 addition & 1 deletion tests/cli/test_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -1950,7 +1950,7 @@ def test_init_dbt_template(runner: CliRunner, tmp_path: Path):
@time_machine.travel(FREEZE_TIME)
def test_init_project_engine_configs(tmp_path):
engine_type_to_config = {
"redshift": "# concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # schema_differ_overrides: \n # catalog_type_overrides: \n # user: \n # password: \n # database: \n # host: \n # port: \n # source_address: \n # unix_sock: \n # ssl: \n # sslmode: \n # timeout: \n # tcp_keepalive: \n # application_name: \n # preferred_role: \n # principal_arn: \n # credentials_provider: \n # region: \n # cluster_identifier: \n # iam: \n # is_serverless: \n # serverless_acct_id: \n # serverless_work_group: \n # enable_merge: ",
"redshift": "# concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # schema_differ_overrides: \n # catalog_type_overrides: \n # user: \n # password: \n # database: \n # host: \n # port: \n # source_address: \n # unix_sock: \n # ssl: \n # sslmode: \n # timeout: \n # tcp_keepalive: \n # application_name: \n # preferred_role: \n # principal_arn: \n # credentials_provider: \n # region: \n # cluster_identifier: \n # iam: \n # db_user: \n # is_serverless: \n # serverless_acct_id: \n # serverless_work_group: \n # enable_merge: ",
"bigquery": "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # schema_differ_overrides: \n # catalog_type_overrides: \n # method: oauth\n # project: \n # execution_project: \n # quota_project: \n # location: \n # keyfile: \n # keyfile_json: \n # token: \n # refresh_token: \n # client_id: \n # client_secret: \n # token_uri: \n # scopes: \n # impersonated_service_account: \n # job_creation_timeout_seconds: \n # job_execution_timeout_seconds: \n # job_retries: 1\n # job_retry_deadline_seconds: \n # priority: \n # maximum_bytes_billed: ",
"snowflake": "account: \n # concurrent_tasks: 4\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # schema_differ_overrides: \n # catalog_type_overrides: \n # user: \n # password: \n # warehouse: \n # database: \n # role: \n # authenticator: \n # token: \n # host: \n # port: \n # application: Tobiko_SQLMesh\n # private_key: \n # private_key_path: \n # private_key_passphrase: \n # session_parameters: ",
"databricks": "# concurrent_tasks: 1\n # register_comments: True\n # pre_ping: False\n # pretty_sql: False\n # schema_differ_overrides: \n # catalog_type_overrides: \n # server_hostname: \n # http_path: \n # access_token: \n # auth_type: \n # oauth_client_id: \n # oauth_client_secret: \n # catalog: \n # http_headers: \n # session_configuration: \n # databricks_connect_server_hostname: \n # databricks_connect_access_token: \n # databricks_connect_cluster_id: \n # databricks_connect_use_serverless: False\n # force_databricks_connect: False\n # disable_databricks_connect: False\n # disable_spark_session: False",
Expand Down